[ 452.203400] env[62204]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62204) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 452.203734] env[62204]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62204) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 452.203826] env[62204]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62204) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 452.204176] env[62204]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 452.296195] env[62204]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62204) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 452.305451] env[62204]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.009s {{(pid=62204) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 452.904157] env[62204]: INFO nova.virt.driver [None req-3caae536-88a8-4fd4-93df-8fc19bf59ad0 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 452.975929] env[62204]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 452.976174] env[62204]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 452.976174] env[62204]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62204) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 456.052257] env[62204]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-6477e11c-8f95-4273-835e-48640ffa9ee4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.068083] env[62204]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62204) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 456.068289] env[62204]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-b25b6eb8-f46a-4285-b6ef-a442ecb24599 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.100560] env[62204]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 4e1e2.
[ 456.100748] env[62204]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.125s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 456.101254] env[62204]: INFO nova.virt.vmwareapi.driver [None req-3caae536-88a8-4fd4-93df-8fc19bf59ad0 None None] VMware vCenter version: 7.0.3
[ 456.104568] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5d65c5-93ce-4df7-bc31-38055bacc0a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.121473] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0af5373-3a0e-408d-9d94-0169c685184b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.127141] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bf9f3b-0615-4a10-a577-35f241fdde7a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.133475] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b99ee0c-5f95-4118-a535-edba746c2042 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.146103] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070513cb-f491-47d8-b7a6-a2999551116e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.151656] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2292d1cd-a0f2-48fa-9a59-04067bb0015e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.180895] env[62204]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-203050b9-571d-4851-ac81-69bc317ce7cd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 456.185673] env[62204]: DEBUG nova.virt.vmwareapi.driver [None req-3caae536-88a8-4fd4-93df-8fc19bf59ad0 None None] Extension org.openstack.compute already exists. {{(pid=62204) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:225}}
[ 456.188278] env[62204]: INFO nova.compute.provider_config [None req-3caae536-88a8-4fd4-93df-8fc19bf59ad0 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 456.691517] env[62204]: DEBUG nova.context [None req-3caae536-88a8-4fd4-93df-8fc19bf59ad0 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),d14804ba-5aeb-47a6-932e-14ddd2e00c59(cell1) {{(pid=62204) load_cells /opt/stack/nova/nova/context.py:464}}
[ 456.693742] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 456.693958] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 456.694635] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 456.695066] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Acquiring lock "d14804ba-5aeb-47a6-932e-14ddd2e00c59" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 456.695259] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Lock "d14804ba-5aeb-47a6-932e-14ddd2e00c59" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 456.696247] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Lock "d14804ba-5aeb-47a6-932e-14ddd2e00c59" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 456.716268] env[62204]: INFO dbcounter [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Registered counter for database nova_cell0
[ 456.724145] env[62204]: INFO dbcounter [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Registered counter for database nova_cell1
[ 456.727507] env[62204]: DEBUG oslo_db.sqlalchemy.engines [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62204) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 456.727855] env[62204]: DEBUG oslo_db.sqlalchemy.engines [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62204) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 456.732570] env[62204]: ERROR nova.db.main.api [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 456.732570] env[62204]: result = function(*args, **kwargs)
[ 456.732570] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 456.732570] env[62204]: return func(*args, **kwargs)
[ 456.732570] env[62204]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 456.732570] env[62204]: result = fn(*args, **kwargs)
[ 456.732570] env[62204]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 456.732570] env[62204]: return f(*args, **kwargs)
[ 456.732570] env[62204]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 456.732570] env[62204]: return db.service_get_minimum_version(context, binaries)
[ 456.732570] env[62204]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 456.732570] env[62204]: _check_db_access()
[ 456.732570] env[62204]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 456.732570] env[62204]: stacktrace = ''.join(traceback.format_stack())
[ 456.732570] env[62204]:
[ 456.733548] env[62204]: ERROR nova.db.main.api [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 456.733548] env[62204]: result = function(*args, **kwargs)
[ 456.733548] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 456.733548] env[62204]: return func(*args, **kwargs)
[ 456.733548] env[62204]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 456.733548] env[62204]: result = fn(*args, **kwargs)
[ 456.733548] env[62204]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 456.733548] env[62204]: return f(*args, **kwargs)
[ 456.733548] env[62204]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 456.733548] env[62204]: return db.service_get_minimum_version(context, binaries)
[ 456.733548] env[62204]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 456.733548] env[62204]: _check_db_access()
[ 456.733548] env[62204]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 456.733548] env[62204]: stacktrace = ''.join(traceback.format_stack())
[ 456.733548] env[62204]:
[ 456.734019] env[62204]: WARNING nova.objects.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Failed to get minimum service version for cell d14804ba-5aeb-47a6-932e-14ddd2e00c59
[ 456.734101] env[62204]: WARNING nova.objects.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 456.734489] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Acquiring lock "singleton_lock" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 456.734644] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Acquired lock "singleton_lock" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
456.734884] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Releasing lock "singleton_lock" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 456.735210] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Full set of CONF: {{(pid=62204) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 456.735352] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ******************************************************************************** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 456.735477] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Configuration options gathered from: {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 456.735617] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}} [ 456.735811] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 456.735938] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ================================================================================ {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}} [ 456.736159] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] allow_resize_to_same_host = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.736330] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] arq_binding_timeout = 300 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.736460] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] backdoor_port = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.736588] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] backdoor_socket = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.736749] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] block_device_allocate_retries = 60 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.736910] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] block_device_allocate_retries_interval = 3 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.737096] env[62204]: DEBUG 
oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cert = self.pem {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.737267] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.737434] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute_monitors = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.737600] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] config_dir = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.737768] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] config_drive_format = iso9660 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.737901] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.738085] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] config_source = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.738262] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] console_host = devstack {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.738416] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] control_exchange = nova {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.738572] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cpu_allocation_ratio = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.738740] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] daemon = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.738884] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] debug = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.739047] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] default_access_ip_network_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.739213] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] default_availability_zone = nova {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.739368] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] default_ephemeral_format = 
None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.739523] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] default_green_pool_size = 1000 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.739752] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.739914] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] default_schedule_zone = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.740085] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] disk_allocation_ratio = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.740246] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] enable_new_services = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.740418] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] enabled_apis = ['osapi_compute'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.740579] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] enabled_ssl_apis = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.740738] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] flat_injected = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.740894] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] force_config_drive = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.741071] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] force_raw_images = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.741245] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] graceful_shutdown_timeout = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.741403] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] heal_instance_info_cache_interval = 60 {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.741615] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] host = cpu-1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.741800] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.741954] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.742133] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.742345] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.742510] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] instance_build_timeout = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.742671] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] instance_delete_interval = 300 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.742837] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] instance_format = [instance: %(uuid)s] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.743012] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] instance_name_template = instance-%08x {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.743184] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] instance_usage_audit = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.743357] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] instance_usage_audit_period = month {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.743520] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.743684] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.743849] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] internal_service_availability_zone = internal {{(pid=62204) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.744013] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] key = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.744181] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] live_migration_retry_count = 30 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.744366] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] log_color = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.744515] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] log_config_append = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.744679] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.744837] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] log_dir = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.744994] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] log_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.745139] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] log_options = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.745302] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] log_rotate_interval = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.745469] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] log_rotate_interval_type = days {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.745633] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] log_rotation_type = none {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.745761] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.745885] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.746061] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.746232] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.746359] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.746525] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] long_rpc_timeout = 1800 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.746686] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] max_concurrent_builds = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.746842] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] max_concurrent_live_migrations = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.746999] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] max_concurrent_snapshots = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.747171] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] max_local_block_devices = 3 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.747330] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] max_logfile_count = 30 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.747487] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] max_logfile_size_mb = 200 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.747643] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] maximum_instance_delete_attempts = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.747809] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] metadata_listen = 0.0.0.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.747975] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] metadata_listen_port = 8775 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.748155] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] metadata_workers = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.748317] env[62204]: DEBUG oslo_service.service 
[None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] migrate_max_retries = -1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.748481] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] mkisofs_cmd = genisoimage {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.748685] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.748857] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] my_ip = 10.180.1.21 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.748970] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] network_allocate_retries = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.749157] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.749324] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.749485] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] osapi_compute_listen_port = 8774 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.749650] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] osapi_compute_unique_server_name_scope = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.749812] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] osapi_compute_workers = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.749972] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] password_length = 12 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.750144] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] periodic_enable = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.750302] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] periodic_fuzzy_delay = 60 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.750466] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] pointer_model = usbtablet {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.750630] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] preallocate_images = none {{(pid=62204) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.750785] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] publish_errors = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.750911] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] pybasedir = /opt/stack/nova {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.751077] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ram_allocation_ratio = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.751239] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] rate_limit_burst = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.751404] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] rate_limit_except_level = CRITICAL {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.751560] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] rate_limit_interval = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.751714] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] reboot_timeout = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.751868] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] reclaim_instance_interval = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.752029] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] record = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.752210] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] reimage_timeout_per_gb = 60 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.752380] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] report_interval = 120 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.752539] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] rescue_timeout = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.752697] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] reserved_host_cpus = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.752850] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] reserved_host_disk_mb = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.753010] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 
None None] reserved_host_memory_mb = 512 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.753176] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] reserved_huge_pages = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.753334] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] resize_confirm_window = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.753487] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] resize_fs_using_block_device = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.753639] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] resume_guests_state_on_host_boot = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.753803] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.753959] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] rpc_response_timeout = 60 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.754129] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] run_external_periodic_tasks = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.754297] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] running_deleted_instance_action = reap {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.754454] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.754610] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] running_deleted_instance_timeout = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.754766] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler_instance_sync_interval = 120 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.754931] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_down_time = 720 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.755107] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] servicegroup_driver = db {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.755265] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] shell_completion = None {{(pid=62204) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.755422] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] shelved_offload_time = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.755578] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] shelved_poll_interval = 3600 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.755743] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] shutdown_timeout = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.755901] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] source_is_ipv6 = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.756067] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ssl_only = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.756322] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.756487] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] sync_power_state_interval = 600 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.756645] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] sync_power_state_pool_size = 1000 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.756811] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] syslog_log_facility = LOG_USER {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.756965] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] tempdir = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.757135] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] timeout_nbd = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.757303] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] transport_url = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.757461] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] update_resources_interval = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.757617] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] use_cow_images = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.757772] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] use_eventlog = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.757926] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] use_journal = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.758092] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] use_json = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.758250] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] use_rootwrap_daemon = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.758404] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] use_stderr = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.758557] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] use_syslog = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.758760] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vcpu_pin_set = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.758962] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plugging_is_fatal = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.759034] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plugging_timeout = 300 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.759199] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] virt_mkfs = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.759358] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] volume_usage_poll_interval = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.759514] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] watch_log_file = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.759678] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] web = /usr/share/spice-html5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 456.759860] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.760035] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.760200] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.760369] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_concurrency.disable_process_locking = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.760943] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.761149] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.761326] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.761498] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.761670] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.761836] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.762028] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.auth_strategy = keystone {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.762206] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.compute_link_prefix = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.762382] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.762557] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.dhcp_domain = novalocal {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.762724] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.enable_instance_password = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.762890] 
env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.glance_link_prefix = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.763069] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.763250] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.763414] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.instance_list_per_project_cells = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.763577] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.list_records_by_skipping_down_cells = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.763738] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.local_metadata_per_cell = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.763907] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.max_limit = 1000 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.764089] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.metadata_cache_expiration = 15 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.764269] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.neutron_default_tenant_id = default {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.764442] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.response_validation = warn {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.764623] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.use_neutron_default_nets = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.764794] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.764954] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.765136] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.765311] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.765479] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.vendordata_dynamic_targets = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.765641] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.vendordata_jsonfile_path = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.765823] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.766026] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.backend = dogpile.cache.memcached {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.766204] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.backend_argument = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.766378] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.config_prefix = cache.oslo {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.766548] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.dead_timeout = 60.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.766711] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.debug_cache_backend = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.766871] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.enable_retry_client = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.767043] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.enable_socket_keepalive = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.767220] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.enabled = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.767385] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.enforce_fips_mode = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.767546] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.expiration_time = 600 {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.767709] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.hashclient_retry_attempts = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.767875] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.768049] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_dead_retry = 300 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.768214] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_password = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.768377] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.768539] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.768700] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_pool_maxsize = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.768876] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.769071] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_sasl_enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.769204] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.769371] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.769529] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.memcache_username = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.769693] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.proxies = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.769855] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.redis_db = 0 {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.770025] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.redis_password = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.770203] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.770381] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.770549] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.redis_server = localhost:6379 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.770712] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.redis_socket_timeout = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.770872] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.redis_username = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.771045] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.retry_attempts = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.771216] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.retry_delay = 0.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.771381] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.socket_keepalive_count = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.771542] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.socket_keepalive_idle = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.771702] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.socket_keepalive_interval = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.771858] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.tls_allowed_ciphers = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.772023] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.tls_cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.772188] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.tls_certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.772349] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.tls_enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.772504] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cache.tls_keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.772672] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.auth_section = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.772844] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.auth_type = password {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.773018] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.773196] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.773359] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.773520] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.773680] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.cross_az_attach = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.773839] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.debug = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.773998] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.endpoint_template = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.774176] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.http_retries = 3 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.774340] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.774498] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.774668] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.os_region_name = RegionOne 
{{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.774833] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.774991] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cinder.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.775184] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.775342] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.cpu_dedicated_set = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.775500] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.cpu_shared_set = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.775667] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.image_type_exclude_list = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.775830] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.775992] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.776170] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.776334] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.776501] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.776662] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.resource_provider_association_refresh = 300 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.776823] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.776981] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.shutdown_retry_interval = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.777177] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.777357] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] conductor.workers = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.777533] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] console.allowed_origins = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.777692] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] console.ssl_ciphers = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.777863] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] console.ssl_minimum_version = default {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.778041] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] consoleauth.enforce_session_timeout = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.778216] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] consoleauth.token_ttl = 600 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.778391] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.778555] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.778718] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.778874] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.connect_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.779061] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.connect_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.779198] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.endpoint_override = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.779358] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] 
cyborg.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.779518] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.779676] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.max_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.779830] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.min_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.779986] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.region_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.780158] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.retriable_status_codes = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.780317] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.service_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.780483] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.service_type = accelerator {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.780644] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.780800] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.status_code_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.780953] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.status_code_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.781125] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.781309] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.781469] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] cyborg.version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.781646] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.backend = sqlalchemy {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.781814] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.connection = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.781977] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.connection_debug = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.782165] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.connection_parameters = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.782330] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.connection_recycle_time = 3600 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.782493] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.connection_trace = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.782654] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.db_inc_retry_interval = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.782817] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.db_max_retries = 20 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.782979] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.db_max_retry_interval = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.783154] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.db_retry_interval = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.783316] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.max_overflow = 50 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.783475] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.max_pool_size = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.783632] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.max_retries = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.783800] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.783955] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.mysql_wsrep_sync_wait = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
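
The wall of DEBUG entries in this section is oslo.config dumping every registered option, group by group (api, cache, cinder, compute, database, api_database, glance, ironic, keystone, barbican, vault, ...); the {{(pid=62204) log_opt_values .../oslo_config/cfg.py:2826}} suffix on each entry points at ConfigOpts.log_opt_values, which oslo_service.service invokes once at startup when debug logging is enabled. Options declared with secret=True (passwords, database connection strings, tokens) are printed as **** instead of their real values, as seen for database.connection, cache.memcache_password and key_manager.fixed_key above. Below is a minimal, self-contained sketch of that mechanism using stock oslo.config; the option names and sample values are borrowed from the log purely for illustration and are not Nova's authoritative option definitions.

    import logging

    from oslo_config import cfg

    # Illustrative option declarations; names/values mirror the [cache]
    # entries seen in the log above, they are NOT Nova's real definitions.
    cache_opts = [
        cfg.StrOpt('backend', default='dogpile.cache.memcached'),
        cfg.ListOpt('memcache_servers', default=['localhost:11211']),
        cfg.StrOpt('memcache_password', secret=True),  # secret=True -> logged as ****
    ]

    CONF = cfg.CONF
    CONF.register_opts(cache_opts, group='cache')

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger('oslo_service.service')

    # Parse an (empty) command line; a real service would pass
    # --config-file nova.conf here instead.
    CONF([], project='nova')

    # Emits one "group.option = value" DEBUG line per registered option,
    # masking any option declared as secret, exactly like the lines above.
    CONF.log_opt_values(LOG, logging.DEBUG)

Running the sketch prints cache.backend and cache.memcache_servers with their values and cache.memcache_password as ****, which is why none of the credentials configured for this compute node appear in clear text anywhere in this dump.
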
[ 456.784124] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.pool_timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.784287] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.retry_interval = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.784443] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.slave_connection = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.784602] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.sqlite_synchronous = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.784759] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] database.use_db_reconnect = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.784934] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.backend = sqlalchemy {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.785114] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.connection = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.785284] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.connection_debug = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.785449] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.connection_parameters = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.785608] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.connection_recycle_time = 3600 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.785770] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.connection_trace = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.785928] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.db_inc_retry_interval = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.786101] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.db_max_retries = 20 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.786267] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.db_max_retry_interval = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.786428] env[62204]: DEBUG 
oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.db_retry_interval = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.786587] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.max_overflow = 50 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.786745] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.max_pool_size = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.786904] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.max_retries = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.787081] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.787244] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.787402] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.pool_timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.787563] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.retry_interval = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.787720] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.slave_connection = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.787877] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] api_database.sqlite_synchronous = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.788061] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] devices.enabled_mdev_types = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.788242] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.788414] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ephemeral_storage_encryption.default_format = luks {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.788578] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ephemeral_storage_encryption.enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.788743] env[62204]: 
DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.788913] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.api_servers = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.789092] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.789294] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.789419] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.789579] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.connect_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.789737] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.connect_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.789900] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.debug = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.790078] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.default_trusted_certificate_ids = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.790247] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.enable_certificate_validation = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.790410] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.enable_rbd_download = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.790570] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.endpoint_override = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.790733] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.790895] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.791069] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] 
glance.max_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.791234] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.min_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.791397] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.num_retries = 3 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.791567] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.rbd_ceph_conf = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.791729] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.rbd_connect_timeout = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.791898] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.rbd_pool = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.792079] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.rbd_user = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.792240] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.region_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.792398] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.retriable_status_codes = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.792555] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.service_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.792724] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.service_type = image {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.792886] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.793066] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.status_code_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.793228] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.status_code_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.793385] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.793566] 
env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.793728] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.verify_glance_signatures = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.793886] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] glance.version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.794063] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] guestfs.debug = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.794235] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] mks.enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.794587] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.794777] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] image_cache.manager_interval = 2400 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.794946] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] image_cache.precache_concurrency = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.795128] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] image_cache.remove_unused_base_images = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.795300] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.795470] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.795644] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] image_cache.subdirectory_name = _base {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.795820] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.api_max_retries = 60 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.795983] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.api_retry_interval = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.796158] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.auth_section = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.796322] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.auth_type = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.796483] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.796641] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.796805] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.796967] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.conductor_group = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.797140] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.connect_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.797302] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.connect_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.797458] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.endpoint_override = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.797620] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.797776] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.797934] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.max_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.798104] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.min_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.798273] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.peer_list = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.798431] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.region_name = None {{(pid=62204) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.798588] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.retriable_status_codes = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.798750] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.serial_console_state_timeout = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.798910] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.service_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.799088] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.service_type = baremetal {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.799251] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.shard = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.799418] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.799575] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.status_code_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.799731] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.status_code_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.799889] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.800081] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.800246] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ironic.version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.800427] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.800599] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] key_manager.fixed_key = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.800780] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.800940] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.barbican_api_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.801110] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.barbican_endpoint = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.801284] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.barbican_endpoint_type = public {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.801441] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.barbican_region_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.801599] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.801756] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.801916] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.802111] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.802268] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.802431] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.number_of_retries = 60 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.802589] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.retry_delay = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.802750] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.send_service_user_token = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.802909] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.803129] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.803245] 
env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.verify_ssl = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.803411] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican.verify_ssl_path = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.803593] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican_service_user.auth_section = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.803781] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican_service_user.auth_type = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.803915] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican_service_user.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.804096] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican_service_user.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.804272] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican_service_user.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.804433] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican_service_user.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.804600] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican_service_user.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.804768] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican_service_user.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.804926] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] barbican_service_user.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.805116] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.approle_role_id = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.805293] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.approle_secret_id = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.805485] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.kv_mountpoint = secret {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.805651] env[62204]: DEBUG 
oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.kv_path = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.805816] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.kv_version = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.805993] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.namespace = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.806166] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.root_token_id = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.806325] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.ssl_ca_crt_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.806507] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.timeout = 60.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.806674] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.use_ssl = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.806846] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.807042] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.auth_section = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.807212] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.auth_type = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.807378] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.807553] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.807713] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.807882] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.connect_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.808061] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.connect_retry_delay = None {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.808226] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.endpoint_override = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.808401] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.808571] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.808746] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.max_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.808909] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.min_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.809079] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.region_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.809368] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.retriable_status_codes = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.809435] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.service_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.809593] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.service_type = identity {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.809760] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.809921] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.status_code_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.810099] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.status_code_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.810277] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.810462] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
456.810645] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] keystone.version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.810864] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.connection_uri = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.811031] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.cpu_mode = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.811212] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.811383] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.cpu_models = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.811555] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.cpu_power_governor_high = performance {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.811720] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.811880] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.cpu_power_management = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.812095] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.812228] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.device_detach_attempts = 8 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.812391] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.device_detach_timeout = 20 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.812553] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.disk_cachemodes = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.812709] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.disk_prefix = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.812869] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.enabled_perf_events = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.813042] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.file_backed_memory = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.813210] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.gid_maps = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.813367] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.hw_disk_discard = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.813520] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.hw_machine_type = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.813686] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.images_rbd_ceph_conf = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.813849] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.814016] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.814202] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.images_rbd_glance_store_name = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.814377] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.images_rbd_pool = rbd {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.814546] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.images_type = default {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.814701] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.images_volume_group = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.814859] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.inject_key = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.815028] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.inject_partition = -2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.815193] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.inject_password = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.815354] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] 
libvirt.iscsi_iface = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.815512] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.iser_use_multipath = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.815672] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.815828] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.815987] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_downtime = 500 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.816166] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.816329] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.816490] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_inbound_addr = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.816649] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.816812] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.816971] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_scheme = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.817158] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_timeout_action = abort {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.817320] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_tunnelled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.817477] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_uri = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.817635] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.live_migration_with_native_tls = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.817792] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.max_queues = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.817948] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.818201] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.818363] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.nfs_mount_options = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.818659] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.818829] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.818991] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.819164] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.819328] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.819494] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.num_pcie_ports = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.819664] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.819802] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.pmem_namespaces = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.819974] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.quobyte_client_cfg = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.820264] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.820439] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.820601] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.820764] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.820923] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rbd_secret_uuid = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.821102] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rbd_user = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.821279] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.821451] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.821611] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rescue_image_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.821767] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rescue_kernel_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.821921] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rescue_ramdisk_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.822105] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.822263] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.rx_queue_size = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.822426] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.smbfs_mount_options = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.822697] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.822860] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.snapshot_compression = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.823026] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.snapshot_image_format = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.823240] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.823405] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.sparse_logical_volumes = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.823563] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.swtpm_enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.823727] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.swtpm_group = tss {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.823889] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.swtpm_user = tss {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.824069] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.sysinfo_serial = unique {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.824228] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.tb_cache_size = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.824381] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.tx_queue_size = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.824565] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.uid_maps = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.824737] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.use_virtio_for_bridges = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.824904] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.virt_type = kvm {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.825092] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.volume_clear = zero {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.825265] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.volume_clear_size = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.825427] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.volume_use_multipath = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.825583] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.vzstorage_cache_path = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.825747] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.825914] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.826089] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.826262] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.826533] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.826709] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.vzstorage_mount_user = stack {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.826875] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.827058] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.auth_section = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.827235] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.auth_type = password {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.827392] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.827547] env[62204]: DEBUG oslo_service.service 
[None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.827704] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.827855] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.connect_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.828014] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.connect_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.828187] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.default_floating_pool = public {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.828345] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.endpoint_override = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.828505] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.extension_sync_interval = 600 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.828651] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.http_retries = 3 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.828806] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.828960] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.829126] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.max_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.829296] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.829450] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.min_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.829621] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.ovs_bridge = br-int {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.829772] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.physnets = [] {{(pid=62204) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.829970] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.region_name = RegionOne {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.830102] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.retriable_status_codes = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.830269] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.service_metadata_proxy = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.830424] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.service_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.830586] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.service_type = network {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.830745] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.830900] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.status_code_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.831063] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.status_code_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.831224] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.831398] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.831550] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] neutron.version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.831715] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] notifications.bdms_in_notifications = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.831886] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] notifications.default_level = INFO {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.832133] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] notifications.notification_format = unversioned {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.832232] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] notifications.notify_on_state_change = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.832397] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.832569] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] pci.alias = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.832794] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] pci.device_spec = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.832989] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] pci.report_in_placement = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.833203] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.auth_section = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.833382] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.auth_type = password {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.833547] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.833703] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.833856] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.834025] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.834185] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.connect_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.834339] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.connect_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.834492] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.default_domain_id = None {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.834645] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.default_domain_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.834804] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.domain_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.834969] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.domain_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.835156] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.endpoint_override = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.835322] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.835479] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.835654] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.max_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.835823] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.min_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.835992] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.password = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.836163] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.project_domain_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.836328] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.project_domain_name = Default {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.836493] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.project_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.836672] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.project_name = service {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.836845] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.region_name = RegionOne {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.837012] 
env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.retriable_status_codes = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.837180] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.service_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.837348] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.service_type = placement {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.837521] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.837684] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.status_code_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.837848] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.status_code_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.838037] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.system_scope = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.838210] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.838373] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.trust_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.838668] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.user_domain_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.838709] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.user_domain_name = Default {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.838855] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.user_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.839034] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.username = nova {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.839221] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.839379] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] placement.version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.839551] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.cores = 20 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.839759] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.count_usage_from_placement = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.839891] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.840048] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.injected_file_content_bytes = 10240 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.840218] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.injected_file_path_length = 255 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.840384] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.injected_files = 5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.840547] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.instances = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.840710] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.key_pairs = 100 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.840899] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.metadata_items = 128 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.841077] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.ram = 51200 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.841243] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.recheck_quota = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.841411] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.server_group_members = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.841588] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] quota.server_groups = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.841774] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.841943] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.842138] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.image_metadata_prefilter = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.842308] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.842488] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.max_attempts = 3 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.842657] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.max_placement_results = 1000 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.842821] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.843041] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.843225] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.843430] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] scheduler.workers = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.843621] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.843798] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.843986] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.844181] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.844362] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.844536] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.844709] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.844903] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.845087] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.host_subset_size = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.845263] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.845433] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.845602] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.845776] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.isolated_hosts = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.845947] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.isolated_images = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.846153] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.846333] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.846499] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.846660] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.pci_in_placement = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.846820] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.846980] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.847169] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.847338] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.847505] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.847675] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.847840] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.track_instance_changes = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.848029] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.848208] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] metrics.required = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.848374] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] metrics.weight_multiplier = 1.0 
{{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.848553] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.848724] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] metrics.weight_setting = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.849118] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.849241] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] serial_console.enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.849417] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] serial_console.port_range = 10000:20000 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.849597] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.849815] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.849993] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] serial_console.serialproxy_port = 6083 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.850137] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.auth_section = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.850328] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.auth_type = password {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.850491] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.850653] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.850821] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.850991] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.insecure = False {{(pid=62204) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.851177] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.851353] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.send_service_user_token = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.851527] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.851703] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] service_user.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.851874] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.agent_enabled = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.852054] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.852377] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.852580] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.852757] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.html5proxy_port = 6082 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.852926] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.image_compression = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.853114] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.jpeg_compression = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.853280] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.playback_compression = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.853441] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.require_secure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.853628] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.server_listen = 127.0.0.1 {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.853801] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.853971] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.streaming_mode = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.854151] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] spice.zlib_compression = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.854320] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] upgrade_levels.baseapi = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.854487] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] upgrade_levels.compute = auto {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.854662] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] upgrade_levels.conductor = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.854828] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] upgrade_levels.scheduler = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.855014] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.855194] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.855372] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vendordata_dynamic_auth.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.855938] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vendordata_dynamic_auth.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.855938] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.855938] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vendordata_dynamic_auth.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.856068] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.856191] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.856367] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vendordata_dynamic_auth.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.856556] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.api_retry_count = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.856725] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.ca_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.856897] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.857084] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.cluster_name = testcl1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.857260] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.connection_pool_size = 10 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.857485] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.console_delay_seconds = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.857615] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.datastore_regex = ^datastore.* {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.857851] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.858050] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.host_password = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.858231] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.host_port = 443 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.858402] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.host_username = administrator@vsphere.local {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.858568] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.insecure = True {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.858726] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.integration_bridge = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.858896] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.maximum_objects = 100 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.859083] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.pbm_default_policy = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.859254] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.pbm_enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.859427] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.pbm_wsdl_location = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.859613] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.859776] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.serial_port_proxy_uri = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.859930] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.serial_port_service_uri = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.860160] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.task_poll_interval = 0.5 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.860276] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.use_linked_clone = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.860442] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.vnc_keymap = en-us {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.860606] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.vnc_port = 5900 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.860771] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vmware.vnc_port_total = 10000 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.860958] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.auth_schemes = ['none'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.861146] 
env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.861476] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.861667] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.862255] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.novncproxy_port = 6080 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.862255] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.server_listen = 127.0.0.1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.862255] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.862361] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.vencrypt_ca_certs = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.862525] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.vencrypt_client_cert = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.862714] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vnc.vencrypt_client_key = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.862942] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.863131] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.disable_deep_image_inspection = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.863296] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.863461] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.863636] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.863803] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.disable_rootwrap = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.863962] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.enable_numa_live_migration = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.864141] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.864311] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.864471] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.864635] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.libvirt_disable_apic = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.864795] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.864963] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.865144] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.865314] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.865474] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.865645] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.865819] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.865980] 
env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.866165] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.866342] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.866531] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.866699] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.client_socket_timeout = 900 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.866861] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.default_pool_size = 1000 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.867051] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.keep_alive = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.867226] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.max_header_line = 16384 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.867385] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.867561] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.ssl_ca_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.867726] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.ssl_cert_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.867884] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.ssl_key_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.868074] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.tcp_keepidle = 600 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.868276] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.868445] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] zvm.ca_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.868615] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] zvm.cloud_connector_url = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.868942] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.869149] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] zvm.reachable_timeout = 300 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.869338] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.enforce_new_defaults = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.869989] env[62204]: WARNING oslo_config.cfg [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
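The "group.option = value" DEBUG lines in this startup dump are produced by oslo.config: when nova-compute starts, oslo_service calls ConfigOpts.log_opt_values() (the log_opt_values frame in oslo_config/cfg.py shown in every entry), which walks every registered option group and logs each value at DEBUG, masking options registered as secret (for example vmware.host_password and the notification transport_url appear as ****). The WARNING just above is oslo.config flagging that the configured [oslo_policy] enforce_scope option is marked deprecated-for-removal. The following is a minimal, self-contained sketch of that mechanism, assuming only stock oslo.config; the group and option names mirror the log, but the hand registration is illustrative only (in a real deployment oslo.policy and Nova register these options themselves).

import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG, format='%(levelname)s %(name)s %(message)s')
LOG = logging.getLogger('oslo_service.service')   # same logger name as in the log above

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.BoolOpt('enforce_new_defaults', default=False),
        # deprecated_for_removal is what makes oslo.config emit the
        # "Deprecated: Option ... is deprecated for removal" WARNING seen above
        # when the option is explicitly set in a configuration file.
        cfg.BoolOpt('enforce_scope', default=False,
                    deprecated_for_removal=True,
                    deprecated_reason='OpenStack will always enforce scope checks.'),
        cfg.StrOpt('policy_file', default='policy.yaml'),
    ],
    group='oslo_policy',
)

CONF(args=[], default_config_files=[])            # parse; no config files here, defaults only
CONF.set_override('enforce_scope', True, group='oslo_policy')

# The call oslo_service makes at service start: logs every registered option as
# "group.option = value" at DEBUG, printing '****' for options marked secret=True.
CONF.log_opt_values(LOG, logging.DEBUG)

Run as-is, this prints an option dump in the same "oslo_policy.enforce_scope = True" shape as the surrounding lines. Whether the deprecation WARNING also appears depends on how the value is supplied; in the log above it accompanies a non-default enforce_scope = True, i.e. the option was explicitly configured for this deployment, so treat that part of the sketch as illustrative rather than a reproduction of Nova's startup path.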
[ 456.869989] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.enforce_scope = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.870227] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.policy_default_rule = default {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.870437] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.870530] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.policy_file = policy.yaml {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.870700] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.870872] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.871048] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.871215] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.871383] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.871568] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.871751] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.871935] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.connection_string = messaging:// {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.872132] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.enabled = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.872358] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.es_doc_type = notification 
{{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.872537] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.es_scroll_size = 10000 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.872721] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.es_scroll_time = 2m {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.872893] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.filter_error_trace = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.873105] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.hmac_keys = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.873293] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.sentinel_service_name = mymaster {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.873466] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.socket_timeout = 0.1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.873642] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.trace_requests = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.873812] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler.trace_sqlalchemy = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.873994] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler_jaeger.process_tags = {} {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.874210] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler_jaeger.service_name_prefix = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.874396] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] profiler_otlp.service_name_prefix = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.874575] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] remote_debug.host = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.874763] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] remote_debug.port = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.874978] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.875199] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.875393] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.875570] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.876178] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.876178] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.876178] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.876280] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.876391] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.876565] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.876732] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.876914] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.877101] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.877278] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.877454] 
env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.877626] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.877799] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.877980] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.878163] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.878333] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.878499] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.878666] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.878829] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.879040] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.879224] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.879390] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.879555] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.879722] env[62204]: DEBUG oslo_service.service [None 
req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.879894] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.880075] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.ssl = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.880303] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.880540] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.880628] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.880827] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.881024] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.881204] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.881399] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.881586] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_notifications.retry = -1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.881800] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.882014] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.882540] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.auth_section = None {{(pid=62204) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.882540] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.auth_type = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.882540] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.cafile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.882696] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.certfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.882860] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.collect_timing = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.883030] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.connect_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.883199] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.connect_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.883360] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.endpoint_id = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.883618] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.endpoint_override = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.883715] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.insecure = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.884068] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.keyfile = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.884068] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.max_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.884209] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.min_version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.884364] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.region_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.884527] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.retriable_status_codes = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.884685] 
env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.service_name = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.884996] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.service_type = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.885077] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.split_loggers = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.885224] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.status_code_retries = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.885385] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.status_code_retry_delay = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.885545] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.timeout = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.885704] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.valid_interfaces = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.885865] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_limit.version = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.886041] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_reports.file_event_handler = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.886221] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.886383] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] oslo_reports.log_dir = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.886558] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.886719] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.886875] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.887055] 
env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.887231] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.887392] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.887565] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.887722] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_ovs_privileged.group = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.887879] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.888053] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.888225] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.888417] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] vif_plug_ovs_privileged.user = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.888607] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.888808] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.888997] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.889189] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.889368] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.889560] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.889741] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.889916] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.890711] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.890711] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_ovs.isolate_vif = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.890711] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.890711] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.890850] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.891096] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.891155] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] os_vif_ovs.per_port_bridge = False {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.891298] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] privsep_osbrick.capabilities = [21] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.891458] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] privsep_osbrick.group = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.891617] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] privsep_osbrick.helper_command = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.891777] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.891964] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.892156] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] privsep_osbrick.user = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.892342] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.892507] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] nova_sys_admin.group = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.892666] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] nova_sys_admin.helper_command = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.892830] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.892997] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.893171] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] nova_sys_admin.user = None {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 456.893305] env[62204]: DEBUG oslo_service.service [None req-83204bb1-3808-4ab1-bbdb-413b428e9213 None None] ******************************************************************************** {{(pid=62204) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 456.893816] env[62204]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 457.397388] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Getting list of instances from cluster (obj){ [ 457.397388] env[62204]: value = "domain-c8" [ 457.397388] env[62204]: _type = "ClusterComputeResource" [ 457.397388] env[62204]: } {{(pid=62204) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 457.398555] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc97554-f3ac-4aa1-b9c8-fa2b1850f394 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 457.407430] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Got total of 0 instances {{(pid=62204) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 457.407957] env[62204]: WARNING nova.virt.vmwareapi.driver [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 457.408444] env[62204]: INFO nova.virt.node [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Generated node identity 92e8f362-5134-40c6-9a5c-0b8f64197972 [ 457.408797] env[62204]: INFO nova.virt.node [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Wrote node identity 92e8f362-5134-40c6-9a5c-0b8f64197972 to /opt/stack/data/n-cpu-1/compute_id [ 457.911831] env[62204]: WARNING nova.compute.manager [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Compute nodes ['92e8f362-5134-40c6-9a5c-0b8f64197972'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 458.918404] env[62204]: INFO nova.compute.manager [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 459.927021] env[62204]: WARNING nova.compute.manager [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 459.927021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 459.927021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 459.927021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 459.927021] env[62204]: DEBUG nova.compute.resource_tracker [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62204) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 459.927021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69bbfe52-e382-4951-a430-8b67b6ef87d7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.936164] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe96086-a152-45c8-9007-acdc1dc85240 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.950520] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-38d64136-2a37-4b9f-86b9-b1005c3565fe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.957416] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b44e43c-e140-4652-9f9a-13f08ff39866 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 459.989338] env[62204]: DEBUG nova.compute.resource_tracker [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181423MB free_disk=157GB free_vcpus=48 pci_devices=None {{(pid=62204) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 459.989657] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 459.989944] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 460.498019] env[62204]: WARNING nova.compute.resource_tracker [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] No compute node record for cpu-1:92e8f362-5134-40c6-9a5c-0b8f64197972: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 92e8f362-5134-40c6-9a5c-0b8f64197972 could not be found. [ 461.001826] env[62204]: INFO nova.compute.resource_tracker [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 92e8f362-5134-40c6-9a5c-0b8f64197972 [ 462.512099] env[62204]: DEBUG nova.compute.resource_tracker [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 462.512099] env[62204]: DEBUG nova.compute.resource_tracker [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 462.668176] env[62204]: INFO nova.scheduler.client.report [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] [req-78bc01eb-c2c9-44aa-8dca-aced7be2bd1b] Created resource provider record via placement API for resource provider with UUID 92e8f362-5134-40c6-9a5c-0b8f64197972 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
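Annotation (not part of the captured log): the record above shows the scheduler report client creating a resource provider in the placement service for the new compute node, and the records that follow show its inventory being written and the provider generation moving from 0 to 2. As a rough, hand-written sketch (not Nova's report client), the two HTTP calls implied here could look like the snippet below; the placement URL, token and microversion header are assumed placeholders, while the provider UUID, name and inventory values are the ones that appear in this log.

    # Illustrative sketch only. Creates a resource provider record and then
    # sets its inventory via the placement HTTP API, mirroring the calls
    # implied by the surrounding log records.
    import requests

    PLACEMENT_URL = "http://placement.example:8778"   # assumed endpoint
    HEADERS = {
        "X-Auth-Token": "<service-token>",            # assumed auth token
        "OpenStack-API-Version": "placement 1.28",    # assumed microversion
    }

    provider = {
        "uuid": "92e8f362-5134-40c6-9a5c-0b8f64197972",
        "name": "domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28",
    }
    requests.post(f"{PLACEMENT_URL}/resource_providers",
                  json=provider, headers=HEADERS)

    # Inventory values copied from the ProviderTree update logged below;
    # generation 0 matches "with generation 0" in the next records.
    inventory = {
        "resource_provider_generation": 0,
        "inventories": {
            "VCPU": {"total": 48, "reserved": 0, "min_unit": 1,
                     "max_unit": 16, "step_size": 1, "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                          "max_unit": 65530, "step_size": 1,
                          "allocation_ratio": 1.0},
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                        "max_unit": 157, "step_size": 1,
                        "allocation_ratio": 1.0},
        },
    }
    requests.put(
        f"{PLACEMENT_URL}/resource_providers/{provider['uuid']}/inventories",
        json=inventory, headers=HEADERS)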
[ 462.685027] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e974c338-19e3-45cc-b9e6-113d32da5e14 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 462.691185] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3de7481-157a-4077-ad12-4e9b7837897c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 462.720790] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b537d54-7cae-4c83-a7c1-a23066f56021 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 462.727487] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa58956-97bc-4760-a678-9c93d721ee24 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 462.740059] env[62204]: DEBUG nova.compute.provider_tree [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 463.279037] env[62204]: DEBUG nova.scheduler.client.report [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Updated inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 463.279037] env[62204]: DEBUG nova.compute.provider_tree [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Updating resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 generation from 0 to 1 during operation: update_inventory {{(pid=62204) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 463.279037] env[62204]: DEBUG nova.compute.provider_tree [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 463.330988] env[62204]: DEBUG nova.compute.provider_tree [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Updating 
resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 generation from 1 to 2 during operation: update_traits {{(pid=62204) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 463.837902] env[62204]: DEBUG nova.compute.resource_tracker [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 463.837902] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.846s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 463.837902] env[62204]: DEBUG nova.service [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Creating RPC server for service compute {{(pid=62204) start /opt/stack/nova/nova/service.py:186}} [ 463.851365] env[62204]: DEBUG nova.service [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] Join ServiceGroup membership for this service compute {{(pid=62204) start /opt/stack/nova/nova/service.py:203}} [ 463.851786] env[62204]: DEBUG nova.servicegroup.drivers.db [None req-cee25d8d-5b75-42c8-ad51-ae575368670f None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62204) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 495.082376] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "91eacc12-5026-4f59-bf2c-babff6c8d42f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 495.083174] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "91eacc12-5026-4f59-bf2c-babff6c8d42f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 495.587329] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 496.124077] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 496.124077] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 496.125891] env[62204]: INFO nova.compute.claims [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 497.183640] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ad1d81-518e-4410-b7d9-b53be877d062 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.192648] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf3b22d-f02a-4bd0-b7d2-e9996deb028a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.228215] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962e0568-52d7-492f-8dde-c2f9cee849b9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.239195] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05d9066-4257-440b-b462-95ab2f60d9be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.256124] env[62204]: DEBUG nova.compute.provider_tree [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 497.759502] env[62204]: DEBUG nova.scheduler.client.report [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 498.268622] env[62204]: DEBUG 
oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 498.269631] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 498.775317] env[62204]: DEBUG nova.compute.utils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 498.777522] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 498.777831] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 499.283846] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 499.985425] env[62204]: DEBUG nova.policy [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b6485d1a706401e8c7d21bce1a13579', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd69ddf730fff40a98f43a3b13dd30e7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 500.300908] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 500.336170] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 500.337187] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 500.337187] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 500.337187] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 500.337187] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 500.337753] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 500.338227] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 500.339033] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
500.339655] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 500.342190] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 500.342413] env[62204]: DEBUG nova.virt.hardware [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 500.347031] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7de1d6-5bb5-4d38-b63d-9fea1bfa5262 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.357019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1460d8c0-3f02-434b-ba14-50843731070d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 500.375251] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a22ccd-32a8-4aea-89c2-d685cb583ffb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 501.687482] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Successfully created port: a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 503.558657] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Acquiring lock "8b6abe21-275f-474d-801d-b94627e8e832" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 503.558960] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Lock "8b6abe21-275f-474d-801d-b94627e8e832" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 504.061576] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 504.185545] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "319eed10-3985-45c3-b864-7c984a8b9819" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 504.185774] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "319eed10-3985-45c3-b864-7c984a8b9819" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 504.589833] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 504.590366] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 504.592584] env[62204]: INFO nova.compute.claims [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 504.690472] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 504.983710] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Acquiring lock "fdb9ae1a-a561-475c-9e13-803765c21582" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 504.983710] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Lock "fdb9ae1a-a561-475c-9e13-803765c21582" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 505.227139] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 505.489387] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 505.716575] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c81655-f2dc-4616-af07-1811416e79dc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.727894] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df19d73a-cc6a-4c26-934f-d7d331ff9e2e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.766123] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f6e238-7508-4821-ae0c-2c2819e7a459 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.774255] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9f85e7-f8fd-4700-bee4-8783a58a61ca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 505.788953] env[62204]: DEBUG nova.compute.provider_tree [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 506.020909] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.154946] env[62204]: ERROR nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2, please check neutron logs for more information. [ 506.154946] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 506.154946] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 506.154946] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 506.154946] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 506.154946] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 506.154946] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 506.154946] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 506.154946] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 506.154946] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 506.154946] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 506.154946] env[62204]: ERROR nova.compute.manager raise self.value [ 506.154946] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 506.154946] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 506.154946] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 506.154946] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 506.155670] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 506.155670] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 506.155670] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2, please check neutron logs for more information. 
[ 506.155670] env[62204]: ERROR nova.compute.manager [ 506.155670] env[62204]: Traceback (most recent call last): [ 506.155670] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 506.155670] env[62204]: listener.cb(fileno) [ 506.155670] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 506.155670] env[62204]: result = function(*args, **kwargs) [ 506.155670] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 506.155670] env[62204]: return func(*args, **kwargs) [ 506.155670] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 506.155670] env[62204]: raise e [ 506.155670] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 506.155670] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 506.155670] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 506.155670] env[62204]: created_port_ids = self._update_ports_for_instance( [ 506.155670] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 506.155670] env[62204]: with excutils.save_and_reraise_exception(): [ 506.155670] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 506.155670] env[62204]: self.force_reraise() [ 506.155670] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 506.155670] env[62204]: raise self.value [ 506.155670] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 506.155670] env[62204]: updated_port = self._update_port( [ 506.155670] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 506.155670] env[62204]: _ensure_no_port_binding_failure(port) [ 506.155670] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 506.155670] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 506.156549] env[62204]: nova.exception.PortBindingFailed: Binding failed for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2, please check neutron logs for more information. [ 506.156549] env[62204]: Removing descriptor: 14 [ 506.159240] env[62204]: ERROR nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2, please check neutron logs for more information. 
[ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Traceback (most recent call last): [ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] yield resources [ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self.driver.spawn(context, instance, image_meta, [ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] vm_ref = self.build_virtual_machine(instance, [ 506.159240] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] vif_infos = vmwarevif.get_vif_info(self._session, [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] for vif in network_info: [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] return self._sync_wrapper(fn, *args, **kwargs) [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self.wait() [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self[:] = self._gt.wait() [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] return self._exit_event.wait() [ 506.159663] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 506.159663] env[62204]: ERROR 
nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] result = hub.switch() [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] return self.greenlet.switch() [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] result = function(*args, **kwargs) [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] return func(*args, **kwargs) [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] raise e [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] nwinfo = self.network_api.allocate_for_instance( [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] created_port_ids = self._update_ports_for_instance( [ 506.160882] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] with excutils.save_and_reraise_exception(): [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self.force_reraise() [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] raise self.value [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] updated_port = self._update_port( [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 506.161332] 
env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] _ensure_no_port_binding_failure(port) [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] raise exception.PortBindingFailed(port_id=port['id']) [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] nova.exception.PortBindingFailed: Binding failed for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2, please check neutron logs for more information. [ 506.161332] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] [ 506.161663] env[62204]: INFO nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Terminating instance [ 506.166025] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "refresh_cache-91eacc12-5026-4f59-bf2c-babff6c8d42f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 506.166025] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquired lock "refresh_cache-91eacc12-5026-4f59-bf2c-babff6c8d42f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 506.166025] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 506.295895] env[62204]: DEBUG nova.scheduler.client.report [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 506.739686] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 506.804714] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.214s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 506.806363] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 506.811760] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.585s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 506.813258] env[62204]: INFO nova.compute.claims [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 507.058200] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Acquiring lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.058200] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 507.136177] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 507.319289] env[62204]: DEBUG nova.compute.utils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 507.320809] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] 
Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 507.321949] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 507.473721] env[62204]: DEBUG nova.policy [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7394aac53aea4e5ebd3b7498b260b77e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd7f80585ce2843a3b444a2456675ee94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 507.508827] env[62204]: DEBUG nova.compute.manager [req-0536566d-046e-46d7-a0a2-6c193aa3da36 req-cd22d7bf-66ed-48c4-819a-aefb55c89cf6 service nova] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Received event network-changed-a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 507.509031] env[62204]: DEBUG nova.compute.manager [req-0536566d-046e-46d7-a0a2-6c193aa3da36 req-cd22d7bf-66ed-48c4-819a-aefb55c89cf6 service nova] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Refreshing instance network info cache due to event network-changed-a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 507.509216] env[62204]: DEBUG oslo_concurrency.lockutils [req-0536566d-046e-46d7-a0a2-6c193aa3da36 req-cd22d7bf-66ed-48c4-819a-aefb55c89cf6 service nova] Acquiring lock "refresh_cache-91eacc12-5026-4f59-bf2c-babff6c8d42f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 507.564150] env[62204]: DEBUG nova.compute.manager [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 507.640243] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Releasing lock "refresh_cache-91eacc12-5026-4f59-bf2c-babff6c8d42f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 507.640777] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 507.640777] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 507.641460] env[62204]: DEBUG oslo_concurrency.lockutils [req-0536566d-046e-46d7-a0a2-6c193aa3da36 req-cd22d7bf-66ed-48c4-819a-aefb55c89cf6 service nova] Acquired lock "refresh_cache-91eacc12-5026-4f59-bf2c-babff6c8d42f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 507.641460] env[62204]: DEBUG nova.network.neutron [req-0536566d-046e-46d7-a0a2-6c193aa3da36 req-cd22d7bf-66ed-48c4-819a-aefb55c89cf6 service nova] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Refreshing network info cache for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 507.642289] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b357753a-1dc4-4ea5-91a6-35e2c30b9670 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.654572] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d41b75-4e4f-4bbc-a086-5ed68b022101 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.679018] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 91eacc12-5026-4f59-bf2c-babff6c8d42f could not be found. [ 507.679018] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 507.679018] env[62204]: INFO nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 507.679018] env[62204]: DEBUG oslo.service.loopingcall [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 507.679204] env[62204]: DEBUG nova.compute.manager [-] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 507.679204] env[62204]: DEBUG nova.network.neutron [-] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 507.732710] env[62204]: DEBUG nova.network.neutron [-] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 507.826806] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 507.889600] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Acquiring lock "04f9b938-7bcd-4b49-9115-0693fb478326" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.893023] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Lock "04f9b938-7bcd-4b49-9115-0693fb478326" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 507.987286] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6561798-8041-42a9-8458-467b066a721c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.995571] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a43f731-d1fe-4cda-9a63-0925cf2e4ca8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.035604] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4785607f-82c7-4782-af1e-9d583b1dcf55 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.050386] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e08902b-894b-4ee0-b7ac-844bdf6d1aa4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.072301] env[62204]: DEBUG nova.compute.provider_tree [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 508.101374] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.236397] env[62204]: DEBUG nova.network.neutron [req-0536566d-046e-46d7-a0a2-6c193aa3da36 req-cd22d7bf-66ed-48c4-819a-aefb55c89cf6 service nova] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 508.239981] env[62204]: DEBUG nova.network.neutron [-] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 508.371425] env[62204]: DEBUG nova.network.neutron [req-0536566d-046e-46d7-a0a2-6c193aa3da36 req-cd22d7bf-66ed-48c4-819a-aefb55c89cf6 service nova] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 508.399581] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 508.576407] env[62204]: DEBUG nova.scheduler.client.report [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 508.598299] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Successfully created port: dec4a747-07b3-4b9c-ac2b-32a7106619a4 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 508.746548] env[62204]: INFO nova.compute.manager [-] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Took 1.07 seconds to deallocate network for instance. 
[ 508.749535] env[62204]: DEBUG nova.compute.claims [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 508.749810] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 508.840422] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 508.875865] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 508.875967] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 508.877030] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 508.877030] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 508.877030] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 508.877268] env[62204]: DEBUG nova.virt.hardware [None 
req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 508.877364] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 508.877526] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 508.879018] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 508.879018] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 508.879018] env[62204]: DEBUG nova.virt.hardware [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 508.879018] env[62204]: DEBUG oslo_concurrency.lockutils [req-0536566d-046e-46d7-a0a2-6c193aa3da36 req-cd22d7bf-66ed-48c4-819a-aefb55c89cf6 service nova] Releasing lock "refresh_cache-91eacc12-5026-4f59-bf2c-babff6c8d42f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 508.879477] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75baeefa-d83a-4854-bc98-022321b5b14f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.888197] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51db26e2-7437-4de1-9132-ef349e016127 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.936493] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 509.083744] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 
tempest-MigrationsAdminTest-222504242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.271s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 509.083744] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 509.086475] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.065s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 509.087619] env[62204]: INFO nova.compute.claims [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 509.593206] env[62204]: DEBUG nova.compute.utils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 509.596445] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 509.598015] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 509.790068] env[62204]: DEBUG nova.policy [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83f18922c806425294f40068c35058a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '150ae78bb04a4676aa9d080a357986d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 510.097382] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 510.233173] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a095b10a-7b7d-4d71-8213-796bc7a3a7e8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.242134] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6d45a8-17b0-47b5-a3a1-1bdfe4343f3e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.280861] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdceaa36-fc2d-498b-aeb3-de21456b4cd0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.289603] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbed8ef-4602-4667-a44f-75b6e56392d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.305743] env[62204]: DEBUG nova.compute.provider_tree [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 510.767094] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Successfully created port: f6576648-ea3e-40e8-b6df-1fb33c94bc0b {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 510.811117] env[62204]: DEBUG nova.scheduler.client.report [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 510.972959] env[62204]: DEBUG nova.compute.manager [req-49b615d7-2ec0-409d-85f2-b277d39c861e req-157aac73-ac18-4806-b012-f9793afdaaf3 service nova] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Received event network-vif-deleted-a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 511.115840] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 511.147185] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 511.147512] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 511.147636] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 511.147820] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 511.147967] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 511.148566] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 511.149085] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 511.150097] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 511.150368] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d 
tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 511.150562] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 511.150740] env[62204]: DEBUG nova.virt.hardware [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 511.151642] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba2be80-90bd-44d7-aa1a-b27579567883 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.161344] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852d91b8-10d8-410a-a091-281a45794b52 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 511.316822] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.230s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 511.317098] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 511.321527] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.219s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 511.325512] env[62204]: INFO nova.compute.claims [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 511.831700] env[62204]: DEBUG nova.compute.utils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 511.833970] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 511.833970] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 511.946433] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "4644dfab-0758-43e6-bbcc-9930f086a4e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 511.946661] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "4644dfab-0758-43e6-bbcc-9930f086a4e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 511.968203] env[62204]: DEBUG nova.policy [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6898bf7b1fd9481295204dbaa4be001a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f478bc235dc5459baa8a55c234ab228a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) 
authorize /opt/stack/nova/nova/policy.py:201}} [ 512.105072] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "e0a58fc7-7929-4e18-8cc9-1e5074123f06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.105072] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "e0a58fc7-7929-4e18-8cc9-1e5074123f06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.342826] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 512.449557] env[62204]: DEBUG nova.compute.manager [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 512.487854] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c78d37-74c7-4d94-acd6-c6e16b776042 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.495890] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce279329-6d22-46b3-ae03-6ce2b53be607 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.530336] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058b14ba-2989-4f26-9a25-d90e30f210b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.537845] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2886bd0a-ae08-4161-bda0-83e6c272c8db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.556712] env[62204]: DEBUG nova.compute.provider_tree [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
512.610388] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 512.711141] env[62204]: ERROR nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b, please check neutron logs for more information. [ 512.711141] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 512.711141] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 512.711141] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 512.711141] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 512.711141] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 512.711141] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 512.711141] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 512.711141] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 512.711141] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 512.711141] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 512.711141] env[62204]: ERROR nova.compute.manager raise self.value [ 512.711141] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 512.711141] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 512.711141] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 512.711141] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 512.712903] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 512.712903] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 512.712903] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b, please check neutron logs for more information. 
[ 512.712903] env[62204]: ERROR nova.compute.manager [ 512.712903] env[62204]: Traceback (most recent call last): [ 512.712903] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 512.712903] env[62204]: listener.cb(fileno) [ 512.712903] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 512.712903] env[62204]: result = function(*args, **kwargs) [ 512.712903] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 512.712903] env[62204]: return func(*args, **kwargs) [ 512.712903] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 512.712903] env[62204]: raise e [ 512.712903] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 512.712903] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 512.712903] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 512.712903] env[62204]: created_port_ids = self._update_ports_for_instance( [ 512.712903] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 512.712903] env[62204]: with excutils.save_and_reraise_exception(): [ 512.712903] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 512.712903] env[62204]: self.force_reraise() [ 512.712903] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 512.712903] env[62204]: raise self.value [ 512.712903] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 512.712903] env[62204]: updated_port = self._update_port( [ 512.712903] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 512.712903] env[62204]: _ensure_no_port_binding_failure(port) [ 512.712903] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 512.712903] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 512.717778] env[62204]: nova.exception.PortBindingFailed: Binding failed for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b, please check neutron logs for more information. [ 512.717778] env[62204]: Removing descriptor: 16 [ 512.717778] env[62204]: ERROR nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b, please check neutron logs for more information. 
[ 512.717778] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Traceback (most recent call last): [ 512.717778] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 512.717778] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] yield resources [ 512.717778] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 512.717778] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self.driver.spawn(context, instance, image_meta, [ 512.717778] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 512.717778] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self._vmops.spawn(context, instance, image_meta, injected_files, [ 512.717778] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 512.717778] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] vm_ref = self.build_virtual_machine(instance, [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] vif_infos = vmwarevif.get_vif_info(self._session, [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] for vif in network_info: [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] return self._sync_wrapper(fn, *args, **kwargs) [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self.wait() [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self[:] = self._gt.wait() [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] return self._exit_event.wait() [ 512.718295] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 512.719607] env[62204]: ERROR 
nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] result = hub.switch() [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] return self.greenlet.switch() [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] result = function(*args, **kwargs) [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] return func(*args, **kwargs) [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] raise e [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] nwinfo = self.network_api.allocate_for_instance( [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 512.719607] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] created_port_ids = self._update_ports_for_instance( [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] with excutils.save_and_reraise_exception(): [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self.force_reraise() [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] raise self.value [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] updated_port = self._update_port( [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 512.721279] 
env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] _ensure_no_port_binding_failure(port) [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 512.721279] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] raise exception.PortBindingFailed(port_id=port['id']) [ 512.722456] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] nova.exception.PortBindingFailed: Binding failed for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b, please check neutron logs for more information. [ 512.722456] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] [ 512.722456] env[62204]: INFO nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Terminating instance [ 512.722456] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "refresh_cache-319eed10-3985-45c3-b864-7c984a8b9819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 512.722456] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquired lock "refresh_cache-319eed10-3985-45c3-b864-7c984a8b9819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 512.722456] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 512.772693] env[62204]: ERROR nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port dec4a747-07b3-4b9c-ac2b-32a7106619a4, please check neutron logs for more information. 
[ 512.772693] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 512.772693] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 512.772693] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 512.772693] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 512.772693] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 512.772693] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 512.772693] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 512.772693] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 512.772693] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 512.772693] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 512.772693] env[62204]: ERROR nova.compute.manager raise self.value [ 512.772693] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 512.772693] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 512.772693] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 512.772693] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 512.773438] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 512.773438] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 512.773438] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port dec4a747-07b3-4b9c-ac2b-32a7106619a4, please check neutron logs for more information. 
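Every traceback in this run bottoms out in _ensure_no_port_binding_failure raising PortBindingFailed for the port Neutron could not bind. A minimal stand-alone sketch of that check follows; the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions about the Neutron port dict rather than facts from this log, and the exception class is a local stand-in for nova.exception.PortBindingFailed.

# Stand-alone sketch (assumed port-dict keys; not Nova's exact source).
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)
        self.port_id = port_id


def ensure_no_port_binding_failure(port):
    # After Neutron returns the updated port, a failed binding is turned
    # into the exception seen throughout the traces above.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])
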
[ 512.773438] env[62204]: ERROR nova.compute.manager [ 512.773438] env[62204]: Traceback (most recent call last): [ 512.773438] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 512.773438] env[62204]: listener.cb(fileno) [ 512.773438] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 512.773438] env[62204]: result = function(*args, **kwargs) [ 512.773438] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 512.773438] env[62204]: return func(*args, **kwargs) [ 512.773438] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 512.773438] env[62204]: raise e [ 512.773438] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 512.773438] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 512.773438] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 512.773438] env[62204]: created_port_ids = self._update_ports_for_instance( [ 512.773438] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 512.773438] env[62204]: with excutils.save_and_reraise_exception(): [ 512.773438] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 512.773438] env[62204]: self.force_reraise() [ 512.773438] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 512.773438] env[62204]: raise self.value [ 512.773438] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 512.773438] env[62204]: updated_port = self._update_port( [ 512.773438] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 512.773438] env[62204]: _ensure_no_port_binding_failure(port) [ 512.773438] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 512.773438] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 512.774211] env[62204]: nova.exception.PortBindingFailed: Binding failed for port dec4a747-07b3-4b9c-ac2b-32a7106619a4, please check neutron logs for more information. [ 512.774211] env[62204]: Removing descriptor: 14 [ 512.774211] env[62204]: ERROR nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port dec4a747-07b3-4b9c-ac2b-32a7106619a4, please check neutron logs for more information. 
[ 512.774211] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Traceback (most recent call last): [ 512.774211] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 512.774211] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] yield resources [ 512.774211] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 512.774211] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self.driver.spawn(context, instance, image_meta, [ 512.774211] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 512.774211] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self._vmops.spawn(context, instance, image_meta, injected_files, [ 512.774211] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 512.774211] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] vm_ref = self.build_virtual_machine(instance, [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] vif_infos = vmwarevif.get_vif_info(self._session, [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] for vif in network_info: [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] return self._sync_wrapper(fn, *args, **kwargs) [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self.wait() [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self[:] = self._gt.wait() [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] return self._exit_event.wait() [ 512.774618] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 512.774956] env[62204]: ERROR 
nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] result = hub.switch() [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] return self.greenlet.switch() [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] result = function(*args, **kwargs) [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] return func(*args, **kwargs) [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] raise e [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] nwinfo = self.network_api.allocate_for_instance( [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 512.774956] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] created_port_ids = self._update_ports_for_instance( [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] with excutils.save_and_reraise_exception(): [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self.force_reraise() [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] raise self.value [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] updated_port = self._update_port( [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 512.775368] 
env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] _ensure_no_port_binding_failure(port) [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 512.775368] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] raise exception.PortBindingFailed(port_id=port['id']) [ 512.775707] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] nova.exception.PortBindingFailed: Binding failed for port dec4a747-07b3-4b9c-ac2b-32a7106619a4, please check neutron logs for more information. [ 512.775707] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] [ 512.775707] env[62204]: INFO nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Terminating instance [ 512.775707] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Successfully created port: 30ef29f8-f42b-42b9-b1e1-f1e95af0982c {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 512.777566] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Acquiring lock "refresh_cache-8b6abe21-275f-474d-801d-b94627e8e832" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 512.777566] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Acquired lock "refresh_cache-8b6abe21-275f-474d-801d-b94627e8e832" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 512.777688] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 512.980549] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.085446] env[62204]: ERROR nova.scheduler.client.report [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [req-3423f60b-792e-4865-88ba-954d7df3397f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92e8f362-5134-40c6-9a5c-0b8f64197972. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3423f60b-792e-4865-88ba-954d7df3397f"}]} [ 513.107680] env[62204]: DEBUG nova.scheduler.client.report [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Refreshing inventories for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 513.127357] env[62204]: DEBUG nova.scheduler.client.report [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Updating ProviderTree inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 513.127357] env[62204]: DEBUG nova.compute.provider_tree [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 157, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 513.132225] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.141506] env[62204]: DEBUG nova.scheduler.client.report [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Refreshing aggregate associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, aggregates: None {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 513.164668] env[62204]: DEBUG nova.scheduler.client.report [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Refreshing trait associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, traits: 
COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 513.277554] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 513.309979] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 513.358301] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 513.368513] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Acquiring lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.396444] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799eac6f-9ef8-4671-8830-6d1cd7d19068 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.401651] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 513.401876] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 513.402051] env[62204]: DEBUG nova.virt.hardware [None 
req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 513.402235] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 513.402474] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 513.402646] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 513.402852] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 513.403014] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 513.403183] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 513.403343] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 513.403510] env[62204]: DEBUG nova.virt.hardware [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 513.404630] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7112274-ff27-4370-ac80-c6839bb06e18 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.417146] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc8a850-7e7d-4ef2-a5d9-0c8d1868c0d6 
{{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.423691] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a81a1b-a467-437c-8f06-5de332c8bd2f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.457209] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 513.459243] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b1269c-0cd1-43c5-8140-2fbf6258356e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.476171] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34c0fec-10f1-4fbb-bb80-d4cca8f1c5cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.482997] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 513.493589] env[62204]: DEBUG nova.compute.provider_tree [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 513.967021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Releasing lock "refresh_cache-8b6abe21-275f-474d-801d-b94627e8e832" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 513.967021] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 513.967021] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 513.967021] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a5a114d-3e12-4785-8c5d-85df45104d9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.974948] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4452ea8-24fe-4019-9413-776251958504 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.995963] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Releasing lock "refresh_cache-319eed10-3985-45c3-b864-7c984a8b9819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 513.996424] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 513.996620] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 513.999994] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6169248-c6da-40c0-85d2-e623635fbb82 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.006250] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8b6abe21-275f-474d-801d-b94627e8e832 could not be found. [ 514.006470] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 514.006692] env[62204]: INFO nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Took 0.04 seconds to destroy the instance on the hypervisor. 
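Both destroy sequences above take the "Instance does not exist on backend" path: spawn failed before any VM was created in vCenter, so cleanup logs a warning and proceeds as if the destroy succeeded. A rough sketch of that tolerant-cleanup pattern, with a hypothetical vm_backend_destroy callable and a local InstanceNotFound stand-in:

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Local stand-in for nova.exception.InstanceNotFound."""


def destroy_on_hypervisor(instance_uuid, vm_backend_destroy):
    # vm_backend_destroy is a hypothetical callable that removes the VM
    # from the hypervisor and raises InstanceNotFound when no VM with
    # that UUID exists on the backend.
    try:
        vm_backend_destroy(instance_uuid)
    except InstanceNotFound:
        # Matches the WARNING above: nothing to tear down, so cleanup
        # records the fact and continues rather than failing the delete.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.info("Instance destroyed: %s", instance_uuid)
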
[ 514.008014] env[62204]: DEBUG oslo.service.loopingcall [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 514.008014] env[62204]: DEBUG nova.compute.manager [-] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 514.008014] env[62204]: DEBUG nova.network.neutron [-] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 514.013499] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9027ce5-d056-47ee-bd74-ba348fa8d853 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.036520] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 319eed10-3985-45c3-b864-7c984a8b9819 could not be found. [ 514.037395] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 514.037395] env[62204]: INFO nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Took 0.04 seconds to destroy the instance on the hypervisor. [ 514.037395] env[62204]: DEBUG oslo.service.loopingcall [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 514.037395] env[62204]: DEBUG nova.compute.manager [-] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 514.038686] env[62204]: DEBUG nova.network.neutron [-] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 514.041806] env[62204]: DEBUG nova.scheduler.client.report [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Updated inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with generation 12 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 514.042095] env[62204]: DEBUG nova.compute.provider_tree [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Updating resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 generation from 12 to 13 during operation: update_inventory {{(pid=62204) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 514.042961] env[62204]: DEBUG nova.compute.provider_tree [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 514.066233] env[62204]: DEBUG nova.network.neutron [-] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 514.067313] env[62204]: DEBUG nova.network.neutron [-] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 514.548239] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.228s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.548877] env[62204]: DEBUG nova.compute.manager [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 514.556146] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 5.803s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 514.571265] env[62204]: DEBUG nova.network.neutron [-] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 514.571265] env[62204]: DEBUG nova.network.neutron [-] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 515.056931] env[62204]: DEBUG nova.compute.claims [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 515.057161] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.078897] env[62204]: INFO nova.compute.manager [-] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Took 1.07 seconds to deallocate network for instance. [ 515.085601] env[62204]: DEBUG nova.compute.manager [req-a1c925d8-70a1-4bfd-acfb-77b3031b192d req-860d8a4d-f18b-4fc9-9d2a-812c9ee586dd service nova] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Received event network-changed-f6576648-ea3e-40e8-b6df-1fb33c94bc0b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 515.085798] env[62204]: DEBUG nova.compute.manager [req-a1c925d8-70a1-4bfd-acfb-77b3031b192d req-860d8a4d-f18b-4fc9-9d2a-812c9ee586dd service nova] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Refreshing instance network info cache due to event network-changed-f6576648-ea3e-40e8-b6df-1fb33c94bc0b. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 515.088173] env[62204]: DEBUG oslo_concurrency.lockutils [req-a1c925d8-70a1-4bfd-acfb-77b3031b192d req-860d8a4d-f18b-4fc9-9d2a-812c9ee586dd service nova] Acquiring lock "refresh_cache-319eed10-3985-45c3-b864-7c984a8b9819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 515.088173] env[62204]: DEBUG oslo_concurrency.lockutils [req-a1c925d8-70a1-4bfd-acfb-77b3031b192d req-860d8a4d-f18b-4fc9-9d2a-812c9ee586dd service nova] Acquired lock "refresh_cache-319eed10-3985-45c3-b864-7c984a8b9819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 515.088173] env[62204]: DEBUG nova.network.neutron [req-a1c925d8-70a1-4bfd-acfb-77b3031b192d req-860d8a4d-f18b-4fc9-9d2a-812c9ee586dd service nova] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Refreshing network info cache for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 515.088601] env[62204]: DEBUG nova.compute.claims [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 515.089673] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.093281] env[62204]: INFO nova.compute.manager [-] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Took 1.06 seconds to deallocate network for instance. [ 515.097243] env[62204]: DEBUG nova.compute.claims [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 515.097243] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.160511] env[62204]: ERROR nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c, please check neutron logs for more information. 
[ 515.160511] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 515.160511] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 515.160511] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 515.160511] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 515.160511] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 515.160511] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 515.160511] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 515.160511] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.160511] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 515.160511] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.160511] env[62204]: ERROR nova.compute.manager raise self.value [ 515.160511] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 515.160511] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 515.160511] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.160511] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 515.160938] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.160938] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 515.160938] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c, please check neutron logs for more information. 
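Each of these traces passes through oslo.utils' save_and_reraise_exception context manager (the force_reraise()/raise self.value frames). A minimal usage sketch, assuming oslo.utils is installed; update_port and cleanup are hypothetical callables standing in for the real port update and its error-path cleanup:

from oslo_utils import excutils


def update_port_or_cleanup(update_port, cleanup, port_id):
    try:
        return update_port(port_id)
    except Exception:
        with excutils.save_and_reraise_exception():
            # Work done here must not swallow the error: when the
            # with-block exits, the saved exception is re-raised, which
            # is the force_reraise() frame seen in the traceback.
            cleanup(port_id)
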
[ 515.160938] env[62204]: ERROR nova.compute.manager [ 515.160938] env[62204]: Traceback (most recent call last): [ 515.160938] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 515.160938] env[62204]: listener.cb(fileno) [ 515.160938] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.160938] env[62204]: result = function(*args, **kwargs) [ 515.160938] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 515.160938] env[62204]: return func(*args, **kwargs) [ 515.160938] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 515.160938] env[62204]: raise e [ 515.160938] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 515.160938] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 515.160938] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 515.160938] env[62204]: created_port_ids = self._update_ports_for_instance( [ 515.160938] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 515.160938] env[62204]: with excutils.save_and_reraise_exception(): [ 515.160938] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.160938] env[62204]: self.force_reraise() [ 515.160938] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.160938] env[62204]: raise self.value [ 515.160938] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 515.160938] env[62204]: updated_port = self._update_port( [ 515.160938] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.160938] env[62204]: _ensure_no_port_binding_failure(port) [ 515.160938] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.160938] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 515.161637] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c, please check neutron logs for more information. [ 515.161637] env[62204]: Removing descriptor: 17 [ 515.161637] env[62204]: ERROR nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c, please check neutron logs for more information. 
[ 515.161637] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Traceback (most recent call last): [ 515.161637] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 515.161637] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] yield resources [ 515.161637] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 515.161637] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self.driver.spawn(context, instance, image_meta, [ 515.161637] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 515.161637] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self._vmops.spawn(context, instance, image_meta, injected_files, [ 515.161637] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 515.161637] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] vm_ref = self.build_virtual_machine(instance, [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] vif_infos = vmwarevif.get_vif_info(self._session, [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] for vif in network_info: [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] return self._sync_wrapper(fn, *args, **kwargs) [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self.wait() [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self[:] = self._gt.wait() [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] return self._exit_event.wait() [ 515.162097] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 515.162534] env[62204]: ERROR 
nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] result = hub.switch() [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] return self.greenlet.switch() [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] result = function(*args, **kwargs) [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] return func(*args, **kwargs) [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] raise e [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] nwinfo = self.network_api.allocate_for_instance( [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 515.162534] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] created_port_ids = self._update_ports_for_instance( [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] with excutils.save_and_reraise_exception(): [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self.force_reraise() [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] raise self.value [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] updated_port = self._update_port( [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 515.162864] 
env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] _ensure_no_port_binding_failure(port) [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 515.162864] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] raise exception.PortBindingFailed(port_id=port['id']) [ 515.163693] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] nova.exception.PortBindingFailed: Binding failed for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c, please check neutron logs for more information. [ 515.163693] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] [ 515.163693] env[62204]: INFO nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Terminating instance [ 515.163693] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Acquiring lock "refresh_cache-fdb9ae1a-a561-475c-9e13-803765c21582" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 515.163693] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Acquired lock "refresh_cache-fdb9ae1a-a561-475c-9e13-803765c21582" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 515.163693] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 515.247963] env[62204]: DEBUG nova.compute.manager [req-0871b91e-6a97-4cd2-9778-cc83aa114fbe req-72f5f875-cdd9-4336-bdea-8404d8b021a5 service nova] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Received event network-changed-dec4a747-07b3-4b9c-ac2b-32a7106619a4 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 515.248204] env[62204]: DEBUG nova.compute.manager [req-0871b91e-6a97-4cd2-9778-cc83aa114fbe req-72f5f875-cdd9-4336-bdea-8404d8b021a5 service nova] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Refreshing instance network info cache due to event network-changed-dec4a747-07b3-4b9c-ac2b-32a7106619a4. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 515.254381] env[62204]: DEBUG oslo_concurrency.lockutils [req-0871b91e-6a97-4cd2-9778-cc83aa114fbe req-72f5f875-cdd9-4336-bdea-8404d8b021a5 service nova] Acquiring lock "refresh_cache-8b6abe21-275f-474d-801d-b94627e8e832" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 515.254540] env[62204]: DEBUG oslo_concurrency.lockutils [req-0871b91e-6a97-4cd2-9778-cc83aa114fbe req-72f5f875-cdd9-4336-bdea-8404d8b021a5 service nova] Acquired lock "refresh_cache-8b6abe21-275f-474d-801d-b94627e8e832" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 515.254716] env[62204]: DEBUG nova.network.neutron [req-0871b91e-6a97-4cd2-9778-cc83aa114fbe req-72f5f875-cdd9-4336-bdea-8404d8b021a5 service nova] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Refreshing network info cache for port dec4a747-07b3-4b9c-ac2b-32a7106619a4 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 515.291883] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "62721b0d-0763-43ae-b221-271266bf8794" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.296390] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "62721b0d-0763-43ae-b221-271266bf8794" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.298487] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da3ba5d-0048-41ee-9ad4-46ecbcfc2da2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.318735] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4da4b6-b45b-44cb-9780-26e679dbbc73 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.359695] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e0f0d6-9d1d-4cd9-917c-1d5e13083dcf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.370429] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa56d5d1-e73d-4d3b-bdbc-dcad6297314c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.388780] env[62204]: DEBUG nova.compute.provider_tree [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 515.651248] env[62204]: DEBUG 
nova.network.neutron [req-a1c925d8-70a1-4bfd-acfb-77b3031b192d req-860d8a4d-f18b-4fc9-9d2a-812c9ee586dd service nova] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 515.718754] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 515.810986] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 515.813756] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Acquiring lock "d7f15c60-04ac-429e-b16f-8774f9a050b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.813756] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Lock "d7f15c60-04ac-429e-b16f-8774f9a050b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.816699] env[62204]: DEBUG nova.network.neutron [req-0871b91e-6a97-4cd2-9778-cc83aa114fbe req-72f5f875-cdd9-4336-bdea-8404d8b021a5 service nova] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 515.860072] env[62204]: DEBUG nova.network.neutron [req-a1c925d8-70a1-4bfd-acfb-77b3031b192d req-860d8a4d-f18b-4fc9-9d2a-812c9ee586dd service nova] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 515.891779] env[62204]: DEBUG nova.scheduler.client.report [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 516.038746] env[62204]: DEBUG nova.network.neutron [req-0871b91e-6a97-4cd2-9778-cc83aa114fbe req-72f5f875-cdd9-4336-bdea-8404d8b021a5 service nova] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 516.107181] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 516.324985] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 516.365385] env[62204]: DEBUG oslo_concurrency.lockutils [req-a1c925d8-70a1-4bfd-acfb-77b3031b192d req-860d8a4d-f18b-4fc9-9d2a-812c9ee586dd service nova] Releasing lock "refresh_cache-319eed10-3985-45c3-b864-7c984a8b9819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 516.372176] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.402327] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.848s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 516.402327] env[62204]: ERROR nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2, please check neutron logs for more information. [ 516.402327] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Traceback (most recent call last): [ 516.402327] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 516.402327] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self.driver.spawn(context, instance, image_meta, [ 516.402327] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 516.402327] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 516.402327] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 516.402327] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] vm_ref = self.build_virtual_machine(instance, [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] vif_infos = vmwarevif.get_vif_info(self._session, [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] for vif in network_info: [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 
91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] return self._sync_wrapper(fn, *args, **kwargs) [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self.wait() [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self[:] = self._gt.wait() [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] return self._exit_event.wait() [ 516.402808] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] result = hub.switch() [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] return self.greenlet.switch() [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] result = function(*args, **kwargs) [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] return func(*args, **kwargs) [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] raise e [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] nwinfo = self.network_api.allocate_for_instance( [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 516.403166] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] created_port_ids = self._update_ports_for_instance( [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File 
"/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] with excutils.save_and_reraise_exception(): [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] self.force_reraise() [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] raise self.value [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] updated_port = self._update_port( [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] _ensure_no_port_binding_failure(port) [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 516.403513] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] raise exception.PortBindingFailed(port_id=port['id']) [ 516.403831] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] nova.exception.PortBindingFailed: Binding failed for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2, please check neutron logs for more information. [ 516.403831] env[62204]: ERROR nova.compute.manager [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] [ 516.403831] env[62204]: DEBUG nova.compute.utils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Binding failed for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2, please check neutron logs for more information. 
{{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 516.407511] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.469s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.410478] env[62204]: INFO nova.compute.claims [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 516.417487] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Build of instance 91eacc12-5026-4f59-bf2c-babff6c8d42f was re-scheduled: Binding failed for port a3d56c2f-0cf9-4416-9bb1-43f37f5b81f2, please check neutron logs for more information. {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 516.417925] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 516.418190] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "refresh_cache-91eacc12-5026-4f59-bf2c-babff6c8d42f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 516.418348] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquired lock "refresh_cache-91eacc12-5026-4f59-bf2c-babff6c8d42f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 516.418496] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 516.542686] env[62204]: DEBUG oslo_concurrency.lockutils [req-0871b91e-6a97-4cd2-9778-cc83aa114fbe req-72f5f875-cdd9-4336-bdea-8404d8b021a5 service nova] Releasing lock "refresh_cache-8b6abe21-275f-474d-801d-b94627e8e832" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 516.543106] env[62204]: DEBUG nova.compute.manager [req-0871b91e-6a97-4cd2-9778-cc83aa114fbe req-72f5f875-cdd9-4336-bdea-8404d8b021a5 service nova] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Received event network-vif-deleted-dec4a747-07b3-4b9c-ac2b-32a7106619a4 {{(pid=62204) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11131}} [ 516.612115] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Releasing lock "refresh_cache-fdb9ae1a-a561-475c-9e13-803765c21582" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 516.613534] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 516.613614] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 516.614205] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8be4b11-8618-4334-bf08-698a3bfb9b16 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.626825] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda5e478-d3fd-408f-8ef1-9d8945fbe9e7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.663700] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fdb9ae1a-a561-475c-9e13-803765c21582 could not be found. [ 516.663700] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 516.663700] env[62204]: INFO nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Took 0.05 seconds to destroy the instance on the hypervisor. [ 516.663700] env[62204]: DEBUG oslo.service.loopingcall [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 516.663700] env[62204]: DEBUG nova.compute.manager [-] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 516.663700] env[62204]: DEBUG nova.network.neutron [-] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 516.702581] env[62204]: DEBUG nova.network.neutron [-] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 516.869167] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.947123] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 517.078300] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 517.206773] env[62204]: DEBUG nova.network.neutron [-] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 517.581322] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Releasing lock "refresh_cache-91eacc12-5026-4f59-bf2c-babff6c8d42f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 517.583900] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 517.583900] env[62204]: DEBUG nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 517.583900] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 517.609193] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 517.622503] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fae155c-3fe4-4720-8493-a15f24bb8738 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.630878] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3253c39f-9b76-49e2-be84-ca7aceafd752 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.670068] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b58a3a-8a47-48f9-ac78-832a22a4efa8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.677872] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbbdfc2-2328-4381-88e4-94e9eef531d5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.693854] env[62204]: DEBUG nova.compute.provider_tree [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 517.712729] env[62204]: INFO nova.compute.manager [-] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Took 1.05 seconds to deallocate network for instance. 
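
The two tracebacks above both bottom out in nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure (nova/network/neutron.py:294). A minimal sketch of that check, assuming Neutron flags a failed binding by setting the port's binding:vif_type to 'binding_failed' (the constant value and the port dict shape are assumptions here, not the verbatim Nova source):

    # Rough sketch of the check named in the tracebacks; not the exact Nova code.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed value Neutron reports for a failed binding

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)
            self.port_id = port_id

    def _ensure_no_port_binding_failure(port):
        # 'port' is the port dict Neutron returns after Nova updates it with the
        # host binding; a failed binding is surfaced to the caller as an exception.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

The exception aborts _allocate_network_async, so _build_and_run_instance never reaches the hypervisor: instance fdb9ae1a-a561-475c-9e13-803765c21582 is terminated and its claim aborted below, while 91eacc12-5026-4f59-bf2c-babff6c8d42f was re-scheduled (see the "Build of instance ... was re-scheduled" entry above).
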
[ 517.715200] env[62204]: DEBUG nova.compute.claims [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 517.715695] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.113177] env[62204]: DEBUG nova.network.neutron [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 518.196500] env[62204]: DEBUG nova.scheduler.client.report [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 518.620027] env[62204]: INFO nova.compute.manager [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: 91eacc12-5026-4f59-bf2c-babff6c8d42f] Took 1.04 seconds to deallocate network for instance. [ 518.703212] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.296s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 518.703212] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 518.705737] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.725s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 518.707442] env[62204]: INFO nova.compute.claims [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 518.870986] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Acquiring lock "6e999bd0-38be-42d5-b2b7-3f9196fb941e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.871239] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Lock "6e999bd0-38be-42d5-b2b7-3f9196fb941e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.088155] env[62204]: DEBUG nova.compute.manager [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Received event network-vif-deleted-f6576648-ea3e-40e8-b6df-1fb33c94bc0b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 519.088397] env[62204]: DEBUG nova.compute.manager [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Received event network-changed-30ef29f8-f42b-42b9-b1e1-f1e95af0982c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 519.088683] env[62204]: DEBUG nova.compute.manager [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Refreshing instance network info cache due to event network-changed-30ef29f8-f42b-42b9-b1e1-f1e95af0982c. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 519.088803] env[62204]: DEBUG oslo_concurrency.lockutils [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] Acquiring lock "refresh_cache-fdb9ae1a-a561-475c-9e13-803765c21582" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.088899] env[62204]: DEBUG oslo_concurrency.lockutils [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] Acquired lock "refresh_cache-fdb9ae1a-a561-475c-9e13-803765c21582" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 519.089646] env[62204]: DEBUG nova.network.neutron [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Refreshing network info cache for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 519.212805] env[62204]: DEBUG nova.compute.utils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 519.215610] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 519.215610] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 519.250770] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Acquiring lock "cb218f34-dec8-46ae-8659-6c37df5d51b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.252048] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Lock "cb218f34-dec8-46ae-8659-6c37df5d51b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.331882] env[62204]: DEBUG nova.policy [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41d72a7738af44089d5195e05f81a5ea', 'user_domain_id': 'default', 'system_scope': None, 
'domain_id': None, 'project_id': '35361577b07d4f87bb1e951fc0b1e8b6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 519.619182] env[62204]: DEBUG nova.network.neutron [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 519.702083] env[62204]: INFO nova.scheduler.client.report [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Deleted allocations for instance 91eacc12-5026-4f59-bf2c-babff6c8d42f [ 519.722284] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 519.787722] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Acquiring lock "c1fc621a-bc36-4d55-beec-cdc446bc8d06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.788930] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Lock "c1fc621a-bc36-4d55-beec-cdc446bc8d06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.859636] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 519.859636] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 519.859636] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Starting heal instance info cache {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 519.859636] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Rebuilding the list of instances to heal {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 519.902725] env[62204]: DEBUG nova.network.neutron [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c 
req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 519.960876] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Successfully created port: 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 520.038661] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a81974-2b39-4011-bed7-6304ff9b0a79 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.052902] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3404ccb1-21fa-436f-914d-9a3e40ee8c84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.089824] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c111ffe0-8fc5-4548-b576-81139edd024e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.097854] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11362fde-216f-466e-9248-c306a9b27338 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.113538] env[62204]: DEBUG nova.compute.provider_tree [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 520.211936] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5954279d-d108-4739-bf9c-06c7eee2c6b6 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "91eacc12-5026-4f59-bf2c-babff6c8d42f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.129s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 520.363566] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 520.363566] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 520.364278] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Skipping network cache update for instance because it is Building. 
{{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 520.364476] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 520.364735] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 520.364958] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 520.365554] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Didn't find any instances for network info cache update. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 520.365808] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 520.367530] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 520.367530] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 520.367530] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 520.367530] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 520.368683] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 520.406465] env[62204]: DEBUG oslo_concurrency.lockutils [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] Releasing lock "refresh_cache-fdb9ae1a-a561-475c-9e13-803765c21582" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 520.406795] env[62204]: DEBUG nova.compute.manager [req-e91040ed-cc4b-405c-a5c5-ccafd2130d3c req-62b343d1-53ce-4614-a967-beb1dc6363e1 service nova] [instance: 
fdb9ae1a-a561-475c-9e13-803765c21582] Received event network-vif-deleted-30ef29f8-f42b-42b9-b1e1-f1e95af0982c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 520.618272] env[62204]: DEBUG nova.scheduler.client.report [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 520.716711] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 520.747760] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 520.780109] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 520.780109] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 520.780109] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 520.780263] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 
tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 520.780263] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 520.780263] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 520.780787] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 520.781189] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 520.781478] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 520.781968] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 520.782292] env[62204]: DEBUG nova.virt.hardware [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 520.783795] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6f055d-bd0b-4563-a513-ca37d891ad03 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.796252] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33947151-3ece-4629-b81e-9aa709fd8a85 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.874932] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Getting list of instances from cluster (obj){ [ 520.874932] env[62204]: value = "domain-c8" [ 520.874932] env[62204]: _type = "ClusterComputeResource" [ 520.874932] 
env[62204]: } {{(pid=62204) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 520.875983] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d1b1e4-2a61-4a3a-a13a-0a2494d91423 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.886257] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Got total of 0 instances {{(pid=62204) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 520.887449] env[62204]: WARNING nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] While synchronizing instance power states, found 6 instances in the database and 0 instances on the hypervisor. [ 520.887449] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Triggering sync for uuid 8b6abe21-275f-474d-801d-b94627e8e832 {{(pid=62204) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 520.887449] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Triggering sync for uuid 319eed10-3985-45c3-b864-7c984a8b9819 {{(pid=62204) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 520.887449] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Triggering sync for uuid fdb9ae1a-a561-475c-9e13-803765c21582 {{(pid=62204) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 520.889516] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Triggering sync for uuid b0365735-8e34-4129-93c0-d7c8e79fc5b2 {{(pid=62204) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 520.889516] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Triggering sync for uuid 04f9b938-7bcd-4b49-9115-0693fb478326 {{(pid=62204) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 520.889516] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Triggering sync for uuid 4644dfab-0758-43e6-bbcc-9930f086a4e5 {{(pid=62204) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 520.890038] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "8b6abe21-275f-474d-801d-b94627e8e832" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.890444] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "319eed10-3985-45c3-b864-7c984a8b9819" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.892339] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "fdb9ae1a-a561-475c-9e13-803765c21582" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.892339] env[62204]: DEBUG oslo_concurrency.lockutils [None 
req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.892339] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "04f9b938-7bcd-4b49-9115-0693fb478326" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.892339] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "4644dfab-0758-43e6-bbcc-9930f086a4e5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 520.892339] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 520.892528] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62204) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 520.892528] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 521.124822] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 521.126653] env[62204]: DEBUG nova.compute.manager [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 521.130542] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 521.132473] env[62204]: INFO nova.compute.claims [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 521.243594] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.398161] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.638194] env[62204]: DEBUG nova.compute.utils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 521.645798] env[62204]: DEBUG nova.compute.manager [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Not allocating networking since 'none' was specified. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 522.144808] env[62204]: DEBUG nova.compute.manager [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 522.372771] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bec9f2-64ea-4dc9-878c-5544ea4eaf92 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.384497] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203a1ee7-5389-443a-a186-8d2eae2158f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.418933] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-047799be-7946-4819-a262-02c45938a708 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.430257] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d979303b-93f4-436a-8cc5-8ed6ed371a9d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.451172] env[62204]: DEBUG nova.compute.provider_tree [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 522.804148] env[62204]: ERROR nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418, please check neutron logs for more information. 
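The ERROR record above is the point where Neutron's failed port binding surfaces in nova-compute; the traceback that follows ends in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which raises PortBindingFailed for the offending port. A minimal, self-contained sketch of that style of guard follows; the 'binding:vif_type' == 'binding_failed' condition is an assumption for illustration, and the local exception class merely stands in for nova.exception.PortBindingFailed.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed (illustration only)."""


def ensure_no_port_binding_failure(port):
    # Hedged sketch: Neutron marks the failed binding on the port itself;
    # the exact key nova inspects may differ from this assumption.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port['id'])


# A port dict shaped like the one in the log would trip the guard:
try:
    ensure_no_port_binding_failure(
        {'id': '7ffbdb3c-f538-4c9b-bf17-0b8afdb74418',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)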
[ 522.804148] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 522.804148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 522.804148] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 522.804148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.804148] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 522.804148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.804148] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 522.804148] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.804148] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 522.804148] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.804148] env[62204]: ERROR nova.compute.manager raise self.value [ 522.804148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.804148] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 522.804148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.804148] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 522.804640] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 522.804640] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 522.804640] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418, please check neutron logs for more information. 
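Several frames in the traceback above pass through oslo_utils.excutils.save_and_reraise_exception(), the context manager used to run cleanup while preserving and re-raising the original exception. A small usage sketch under assumed helper names (update_port and rollback are illustrative, not nova's):

from oslo_utils import excutils


def update_ports_for_instance(ports, update_port, rollback):
    created = []
    for port in ports:
        try:
            created.append(update_port(port))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; when the block exits, the original
                # exception (e.g. PortBindingFailed) is re-raised intact.
                rollback(created)
    return created

Binding the context manager to a name and setting its reraise attribute to False inside the block suppresses the re-raise; the default behaviour, as seen in the frames above, is to re-raise.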
[ 522.804640] env[62204]: ERROR nova.compute.manager [ 522.804640] env[62204]: Traceback (most recent call last): [ 522.804640] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 522.804640] env[62204]: listener.cb(fileno) [ 522.804640] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 522.804640] env[62204]: result = function(*args, **kwargs) [ 522.804640] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 522.804640] env[62204]: return func(*args, **kwargs) [ 522.804640] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 522.804640] env[62204]: raise e [ 522.804640] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 522.804640] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 522.804640] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.804640] env[62204]: created_port_ids = self._update_ports_for_instance( [ 522.804640] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.804640] env[62204]: with excutils.save_and_reraise_exception(): [ 522.804640] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.804640] env[62204]: self.force_reraise() [ 522.804640] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.804640] env[62204]: raise self.value [ 522.804640] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.804640] env[62204]: updated_port = self._update_port( [ 522.804640] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.804640] env[62204]: _ensure_no_port_binding_failure(port) [ 522.804640] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 522.804640] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 522.805495] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418, please check neutron logs for more information. [ 522.805495] env[62204]: Removing descriptor: 16 [ 522.805495] env[62204]: ERROR nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418, please check neutron logs for more information. 
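The second traceback above (and the instance-scoped one that follows) shows why the failure only appears at this point: network allocation was started in a green thread, and the stored exception surfaces when the VIF info is first needed and the deferred result is waited on. A self-contained sketch of that deferred pattern using eventlet; the function name and stand-in error are illustrative, not nova's.

import eventlet


def allocate_network():
    # Stand-in for the asynchronous allocation that fails with
    # PortBindingFailed in the log above.
    raise RuntimeError(
        "Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418")


gt = eventlet.spawn(allocate_network)   # started early, in the background
# ... spawning proceeds until the network info is actually required ...
try:
    network_info = gt.wait()            # the stored exception re-raises here
except RuntimeError as exc:
    print("spawn aborted:", exc)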
[ 522.805495] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Traceback (most recent call last): [ 522.805495] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 522.805495] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] yield resources [ 522.805495] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 522.805495] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self.driver.spawn(context, instance, image_meta, [ 522.805495] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 522.805495] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self._vmops.spawn(context, instance, image_meta, injected_files, [ 522.805495] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 522.805495] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] vm_ref = self.build_virtual_machine(instance, [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] vif_infos = vmwarevif.get_vif_info(self._session, [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] for vif in network_info: [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] return self._sync_wrapper(fn, *args, **kwargs) [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self.wait() [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self[:] = self._gt.wait() [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] return self._exit_event.wait() [ 522.805864] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 522.806211] env[62204]: ERROR 
nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] result = hub.switch() [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] return self.greenlet.switch() [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] result = function(*args, **kwargs) [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] return func(*args, **kwargs) [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] raise e [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] nwinfo = self.network_api.allocate_for_instance( [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 522.806211] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] created_port_ids = self._update_ports_for_instance( [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] with excutils.save_and_reraise_exception(): [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self.force_reraise() [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] raise self.value [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] updated_port = self._update_port( [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 522.806646] 
env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] _ensure_no_port_binding_failure(port) [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 522.806646] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] raise exception.PortBindingFailed(port_id=port['id']) [ 522.807091] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] nova.exception.PortBindingFailed: Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418, please check neutron logs for more information. [ 522.807091] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] [ 522.807091] env[62204]: INFO nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Terminating instance [ 522.810283] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Acquiring lock "refresh_cache-04f9b938-7bcd-4b49-9115-0693fb478326" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 522.810283] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Acquired lock "refresh_cache-04f9b938-7bcd-4b49-9115-0693fb478326" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 522.810283] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 522.955964] env[62204]: DEBUG nova.scheduler.client.report [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 523.071101] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquiring lock "6c63cc36-4f25-4196-9e74-50dcbefd37a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 523.071504] env[62204]: DEBUG oslo_concurrency.lockutils [None 
req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "6c63cc36-4f25-4196-9e74-50dcbefd37a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.166233] env[62204]: DEBUG nova.compute.manager [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 523.172212] env[62204]: DEBUG nova.compute.manager [req-99cc5f64-eeef-4bec-b183-55d12270e145 req-a1ff6d32-b3be-46bc-be58-f29c961a44a1 service nova] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Received event network-changed-7ffbdb3c-f538-4c9b-bf17-0b8afdb74418 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 523.172212] env[62204]: DEBUG nova.compute.manager [req-99cc5f64-eeef-4bec-b183-55d12270e145 req-a1ff6d32-b3be-46bc-be58-f29c961a44a1 service nova] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Refreshing instance network info cache due to event network-changed-7ffbdb3c-f538-4c9b-bf17-0b8afdb74418. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 523.172212] env[62204]: DEBUG oslo_concurrency.lockutils [req-99cc5f64-eeef-4bec-b183-55d12270e145 req-a1ff6d32-b3be-46bc-be58-f29c961a44a1 service nova] Acquiring lock "refresh_cache-04f9b938-7bcd-4b49-9115-0693fb478326" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.200239] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 523.200239] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 523.200239] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 523.200239] 
env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 523.200728] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 523.200728] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 523.200728] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 523.200728] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 523.200904] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 523.201057] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 523.201224] env[62204]: DEBUG nova.virt.hardware [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 523.202187] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beacb9f7-0e09-4b76-b4c2-abfab65db729 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.212414] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94603e08-8316-499d-9302-90250463c520 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.228055] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Instance VIF info [] {{(pid=62204) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 523.237859] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 523.239030] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d893f5f9-46f8-4877-9e01-850714ca9b32 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.255799] env[62204]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 523.256018] env[62204]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62204) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 523.256394] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 523.256593] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Creating folder: Project (3242dba0490f4eac81e62bba5a00c89e). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 523.256830] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48089d69-c19d-4e69-87f3-2cc1f321398d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.272324] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Created folder: Project (3242dba0490f4eac81e62bba5a00c89e) in parent group-v259933. [ 523.272740] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Creating folder: Instances. Parent ref: group-v259938. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 523.272837] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15be3212-b49f-48b9-81a1-6f46db687ace {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.284686] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Created folder: Instances in parent group-v259938. [ 523.285036] env[62204]: DEBUG oslo.service.loopingcall [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 523.285142] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 523.285452] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a0fd3ee-ff49-45d8-abd5-57abdff4f7c9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.305739] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 523.305739] env[62204]: value = "task-1199362" [ 523.305739] env[62204]: _type = "Task" [ 523.305739] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.316090] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199362, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 523.352172] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 523.464751] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 523.464751] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 523.466864] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.410s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 523.552567] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 523.815743] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199362, 'name': CreateVM_Task, 'duration_secs': 0.399928} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 523.815934] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 523.816933] env[62204]: DEBUG oslo_vmware.service [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afed129-2750-4000-a407-b3c9d94ba95f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.823988] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.824415] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.825202] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 523.825550] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a7c5265-23d9-4395-8793-ee7fa6a39cf4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.831298] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 523.831298] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d68250-0ba9-feb1-10d5-4b1403d3066a" [ 523.831298] env[62204]: _type = "Task" [ 523.831298] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 523.842098] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d68250-0ba9-feb1-10d5-4b1403d3066a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 523.972631] env[62204]: DEBUG nova.compute.utils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 523.978864] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 523.979246] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 524.057017] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Releasing lock "refresh_cache-04f9b938-7bcd-4b49-9115-0693fb478326" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.057557] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 524.057783] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 524.058337] env[62204]: DEBUG oslo_concurrency.lockutils [req-99cc5f64-eeef-4bec-b183-55d12270e145 req-a1ff6d32-b3be-46bc-be58-f29c961a44a1 service nova] Acquired lock "refresh_cache-04f9b938-7bcd-4b49-9115-0693fb478326" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.058520] env[62204]: DEBUG nova.network.neutron [req-99cc5f64-eeef-4bec-b183-55d12270e145 req-a1ff6d32-b3be-46bc-be58-f29c961a44a1 service nova] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Refreshing network info cache for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 524.059780] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a88c0c3-3d8e-4a28-b732-5bb85b7b495f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.070114] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60ef31b-e19f-4bfd-b66e-649e298dde86 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.100451] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04f9b938-7bcd-4b49-9115-0693fb478326 could not be found. [ 524.100730] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 524.100826] env[62204]: INFO nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Took 0.04 seconds to destroy the instance on the hypervisor. [ 524.101093] env[62204]: DEBUG oslo.service.loopingcall [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 524.101301] env[62204]: DEBUG nova.compute.manager [-] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 524.101390] env[62204]: DEBUG nova.network.neutron [-] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 524.120045] env[62204]: DEBUG nova.policy [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f3d190e13254332aa1ecd1b6d68eb52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38d8f190ca31409ea30e8508e62bb073', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 524.154141] env[62204]: DEBUG nova.network.neutron [-] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 524.244416] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985a6644-d3ee-4a9b-9579-8772bfe7e8c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.254836] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36dfe57e-752a-48b0-a112-83eb18459404 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.288165] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad7dad0-40e8-40bf-8f9c-c1f62abccb3e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.295871] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfcc492-53a9-4240-b9be-8e4b3815927f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.310452] env[62204]: DEBUG nova.compute.provider_tree [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 524.342129] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.342203] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 
tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 524.342418] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.342701] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.342998] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 524.343264] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85df7714-0a72-4681-800a-c27599e90ebd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.360623] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 524.361018] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 524.362032] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e0df74-a7ba-41ef-a7ee-b02bd60cbe2d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.369869] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffcabcaf-96d9-4d82-9c7d-5653073cca70 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.375134] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 524.375134] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d88f32-3ed2-40a2-9fcb-a025a2b96cf4" [ 524.375134] env[62204]: _type = "Task" [ 524.375134] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.383773] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d88f32-3ed2-40a2-9fcb-a025a2b96cf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.482767] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 524.648945] env[62204]: DEBUG nova.network.neutron [req-99cc5f64-eeef-4bec-b183-55d12270e145 req-a1ff6d32-b3be-46bc-be58-f29c961a44a1 service nova] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 524.657234] env[62204]: DEBUG nova.network.neutron [-] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.740487] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Acquiring lock "38104ca0-29bd-4d1e-b20c-47f76491ce32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 524.741209] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Lock "38104ca0-29bd-4d1e-b20c-47f76491ce32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.815444] env[62204]: DEBUG nova.scheduler.client.report [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 524.885679] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Preparing fetch location {{(pid=62204) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 524.886189] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Creating directory with path [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 524.886531] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c856930-3a1b-407a-ba28-75eaa607775c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.898121] env[62204]: DEBUG nova.network.neutron [req-99cc5f64-eeef-4bec-b183-55d12270e145 req-a1ff6d32-b3be-46bc-be58-f29c961a44a1 service nova] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.907160] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Created directory with path [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 524.909018] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Fetch image to [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 524.909018] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Downloading image file data c0e4d3a1-f965-49e2-ab05-fbf425872dcc to [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk on the data store datastore1 {{(pid=62204) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 524.909018] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b645169-0be0-46d9-b6b0-14179ee9e961 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.919365] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341d9d97-3021-43a1-8c64-3c026243df1d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.930218] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952390cb-365e-43a8-a87e-95e8410f69da {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.968547] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797709ea-1a2e-4e32-8aff-1416c0d50b3e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
524.975865] env[62204]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d71e881a-2a68-4af8-abf7-10ffef6e9d99 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.007894] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Downloading image file data c0e4d3a1-f965-49e2-ab05-fbf425872dcc to the data store datastore1 {{(pid=62204) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 525.076084] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62204) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 525.160734] env[62204]: INFO nova.compute.manager [-] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Took 1.06 seconds to deallocate network for instance. [ 525.163725] env[62204]: DEBUG nova.compute.claims [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 525.163907] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.169928] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Successfully created port: 339cf60f-af71-4eaa-8b7e-b1434526bbcf {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 525.323302] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.856s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 525.324148] env[62204]: DEBUG nova.compute.utils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Conflict updating instance b0365735-8e34-4129-93c0-d7c8e79fc5b2. Expected: {'task_state': [None]}. 
Actual: {'task_state': 'deleting'} {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 525.325727] env[62204]: DEBUG nova.compute.manager [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Instance disappeared during build. {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2513}} [ 525.325968] env[62204]: DEBUG nova.compute.manager [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 525.326773] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Acquiring lock "refresh_cache-b0365735-8e34-4129-93c0-d7c8e79fc5b2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.326993] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Acquired lock "refresh_cache-b0365735-8e34-4129-93c0-d7c8e79fc5b2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.327318] env[62204]: DEBUG nova.network.neutron [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 525.331757] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.243s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.401733] env[62204]: DEBUG oslo_concurrency.lockutils [req-99cc5f64-eeef-4bec-b183-55d12270e145 req-a1ff6d32-b3be-46bc-be58-f29c961a44a1 service nova] Releasing lock "refresh_cache-04f9b938-7bcd-4b49-9115-0693fb478326" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 525.492326] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 525.532121] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 525.532405] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 525.532595] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 525.533180] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 525.533180] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 525.534847] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 525.535158] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 525.535347] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 525.535869] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a 
tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 525.535869] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 525.535869] env[62204]: DEBUG nova.virt.hardware [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 525.537197] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c90d49-03e0-4ff5-82b9-f5c267df736a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.553505] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9b09ca-ec25-40e4-bf2a-6b0e2097432b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.825963] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Completed reading data from the image iterator. {{(pid=62204) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 525.826406] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 525.880135] env[62204]: DEBUG nova.network.neutron [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Instance cache missing network info. 
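The nova.virt.hardware records above walk from the m1.nano flavor (1 vCPU, limits and preferences all 0:0:0, maximums 65536) to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]". The enumeration below is illustrative only, not Nova's implementation, but it shows why a 1-vCPU guest with effectively unlimited sockets/cores/threads admits exactly one topology:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) factorizations of the vCPU count
        # that respect the per-dimension maximums.
        topos = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if (sockets * cores * threads == vcpus
                    and sockets <= max_sockets
                    and cores <= max_cores
                    and threads <= max_threads):
                topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology in the log
    print(possible_topologies(4))   # several factorizations, e.g. (1, 4, 1), (2, 2, 1), ...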
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 525.887424] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Downloaded image file data c0e4d3a1-f965-49e2-ab05-fbf425872dcc to vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk on the data store datastore1 {{(pid=62204) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 525.893025] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Caching image {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 525.893025] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Copying Virtual Disk [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk to [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 525.893025] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b9e6eda-b4d9-46af-93e8-e7937ffdc0ab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.898639] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 525.898639] env[62204]: value = "task-1199364" [ 525.898639] env[62204]: _type = "Task" [ 525.898639] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 525.906597] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199364, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.058025] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "ac70a103-cb49-4cef-8069-dd0bb265633a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.058191] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "ac70a103-cb49-4cef-8069-dd0bb265633a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.132148] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6fd66de-75eb-4559-934d-5193853cb2e0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.139947] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026aab9e-e2b7-48f4-bd97-24ae821b88c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.172390] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d8d737-176c-40b0-b1c9-eddc2ac07096 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.180175] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb12680-34a6-40ad-b28e-7f72787636f3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.186861] env[62204]: DEBUG nova.network.neutron [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.198614] env[62204]: DEBUG nova.compute.provider_tree [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 526.411475] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199364, 'name': CopyVirtualDisk_Task} progress is 100%. 
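The "Waiting for the task: (returnval){ value = \"task-1199364\" … } to complete" record followed by "progress is 0% … 100% … completed successfully" shows oslo.vmware's wait_for_task/_poll_task loop driving the CopyVirtualDisk_Task. A schematic version of that loop in plain Python; the fetch_task_info callable and the state dicts are hypothetical stand-ins for the vCenter TaskInfo that oslo.vmware actually reads:

    import time

    def wait_for_task(fetch_task_info, interval=0.5):
        # Poll the task until it reports success or error, logging progress,
        # in the spirit of the _poll_task records above.
        while True:
            info = fetch_task_info()
            state = info['state']
            if state in ('queued', 'running'):
                print("Task %s progress is %s%%" % (info['id'], info.get('progress', 0)))
                time.sleep(interval)
            elif state == 'success':
                return info
            else:  # 'error'
                raise RuntimeError("Task %s failed: %s" % (info['id'], info.get('error')))

    # Toy driver for the sketch: two polls, then success.
    _states = iter([{'id': 'task-1199364', 'state': 'running', 'progress': 0},
                    {'id': 'task-1199364', 'state': 'running', 'progress': 100},
                    {'id': 'task-1199364', 'state': 'success'}])
    wait_for_task(lambda: next(_states), interval=0)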
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 526.691795] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Releasing lock "refresh_cache-b0365735-8e34-4129-93c0-d7c8e79fc5b2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.693600] env[62204]: DEBUG nova.compute.manager [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 526.693600] env[62204]: DEBUG nova.compute.manager [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Skipping network deallocation for instance since networking was not requested. {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 526.704118] env[62204]: DEBUG nova.scheduler.client.report [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 526.723645] env[62204]: DEBUG nova.compute.manager [req-c0f1cb77-be08-4a4e-b267-48a26bd44c6b req-6d307e71-e011-48e9-8363-5543d2636133 service nova] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Received event network-vif-deleted-7ffbdb3c-f538-4c9b-bf17-0b8afdb74418 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 526.915164] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683784} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 526.915164] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Copied Virtual Disk [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk to [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 526.915164] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleting the datastore file [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 526.915471] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0f28bd13-b607-489a-9523-9f3a2fab4efe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.923807] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 526.923807] env[62204]: value = "task-1199365" [ 526.923807] env[62204]: _type = "Task" [ 526.923807] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.934488] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199365, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.211629] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.880s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.212408] env[62204]: ERROR nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port dec4a747-07b3-4b9c-ac2b-32a7106619a4, please check neutron logs for more information. 
[ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Traceback (most recent call last): [ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self.driver.spawn(context, instance, image_meta, [ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self._vmops.spawn(context, instance, image_meta, injected_files, [ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] vm_ref = self.build_virtual_machine(instance, [ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] vif_infos = vmwarevif.get_vif_info(self._session, [ 527.212408] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] for vif in network_info: [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] return self._sync_wrapper(fn, *args, **kwargs) [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self.wait() [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self[:] = self._gt.wait() [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] return self._exit_event.wait() [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] result = hub.switch() [ 527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
527.213487] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] return self.greenlet.switch() [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] result = function(*args, **kwargs) [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] return func(*args, **kwargs) [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] raise e [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] nwinfo = self.network_api.allocate_for_instance( [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] created_port_ids = self._update_ports_for_instance( [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] with excutils.save_and_reraise_exception(): [ 527.214113] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] self.force_reraise() [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] raise self.value [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] updated_port = self._update_port( [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] _ensure_no_port_binding_failure(port) [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] raise exception.PortBindingFailed(port_id=port['id']) [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] nova.exception.PortBindingFailed: Binding failed for port dec4a747-07b3-4b9c-ac2b-32a7106619a4, please check neutron logs for more information. [ 527.214667] env[62204]: ERROR nova.compute.manager [instance: 8b6abe21-275f-474d-801d-b94627e8e832] [ 527.214998] env[62204]: DEBUG nova.compute.utils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Binding failed for port dec4a747-07b3-4b9c-ac2b-32a7106619a4, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 527.219889] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.123s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.223623] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Build of instance 8b6abe21-275f-474d-801d-b94627e8e832 was re-scheduled: Binding failed for port dec4a747-07b3-4b9c-ac2b-32a7106619a4, please check neutron logs for more information. 
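The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py line 294), which raises PortBindingFailed when Neutron reports the port's binding as failed; the compute manager then aborts the resource claim and re-schedules the build, as the surrounding records show. A minimal sketch of that check, with a local stand-in for nova.exception.PortBindingFailed and the failed port id taken from the log:

    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed, reusing the message
        # format seen in the ERROR records above.
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks an unbindable port with binding:vif_type = 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': 'dec4a747-07b3-4b9c-ac2b-32a7106619a4',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message the compute manager logs above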
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 527.228888] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 527.229935] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Acquiring lock "refresh_cache-8b6abe21-275f-474d-801d-b94627e8e832" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.229935] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Acquired lock "refresh_cache-8b6abe21-275f-474d-801d-b94627e8e832" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.229935] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 527.439654] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199365, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026868} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.440489] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 527.440489] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Moving file from [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba/c0e4d3a1-f965-49e2-ab05-fbf425872dcc to [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc. {{(pid=62204) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 527.440489] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-ef051bb9-afd6-4555-8617-7ae9bbc0864b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.452137] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 527.452137] env[62204]: value = "task-1199366" [ 527.452137] env[62204]: _type = "Task" [ 527.452137] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.461441] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199366, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.752368] env[62204]: INFO nova.scheduler.client.report [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Deleted allocations for instance b0365735-8e34-4129-93c0-d7c8e79fc5b2 [ 527.752916] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f38be0f-effa-4d10-a694-9cea78fe7415 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.695s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.757059] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 14.389s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.757407] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Acquiring lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.757893] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.758244] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 527.762036] env[62204]: INFO nova.compute.manager [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Terminating instance [ 527.765388] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 
tempest-ServersListShow296Test-1648664135-project-member] Acquiring lock "refresh_cache-b0365735-8e34-4129-93c0-d7c8e79fc5b2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.765573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Acquired lock "refresh_cache-b0365735-8e34-4129-93c0-d7c8e79fc5b2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.765765] env[62204]: DEBUG nova.network.neutron [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 527.786562] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 527.950043] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "cab990d6-c8e5-49ce-8274-9c59904193ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.950274] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "cab990d6-c8e5-49ce-8274-9c59904193ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.970502] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Acquiring lock "111c0b93-2f02-4f30-9389-0b7f9b041ee8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 527.970728] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Lock "111c0b93-2f02-4f30-9389-0b7f9b041ee8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 527.976550] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199366, 'name': MoveDatastoreFile_Task, 
'duration_secs': 0.025811} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.976550] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] File moved {{(pid=62204) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 527.976550] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Cleaning up location [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 527.976867] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleting the datastore file [datastore1] vmware_temp/52350d32-3ff3-4356-bf50-9ff8777f09ba {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 527.976982] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7dd382d5-165a-4b94-baa8-2121a022bc0f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.986958] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 527.986958] env[62204]: value = "task-1199367" [ 527.986958] env[62204]: _type = "Task" [ 527.986958] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.999655] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199367, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.023287] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.054760] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4646e376-3e3b-409d-a2d2-2e9d20f4b21c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.063352] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebed77f-1ccc-4dd7-8192-ae76863bcee3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.095403] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d4069b-7b14-462e-b075-bdca60bac3a5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.103178] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9f9758-66f8-4b47-966d-daebd5e9d82c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.118765] env[62204]: DEBUG nova.compute.provider_tree [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 528.263097] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 528.409203] env[62204]: DEBUG nova.network.neutron [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.497262] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026965} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 528.497543] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 528.498230] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1805513d-3ecd-4aec-9352-99e9d1052092 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.504976] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 528.504976] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5214b93b-f95e-85f2-8761-87f34a4a4605" [ 528.504976] env[62204]: _type = "Task" [ 528.504976] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 528.515672] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5214b93b-f95e-85f2-8761-87f34a4a4605, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 528.529628] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Releasing lock "refresh_cache-8b6abe21-275f-474d-801d-b94627e8e832" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 528.529901] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 528.530047] env[62204]: DEBUG nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 528.530217] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 528.566266] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.622722] env[62204]: DEBUG nova.scheduler.client.report [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 528.636411] env[62204]: DEBUG nova.network.neutron [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.793195] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.020795] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5214b93b-f95e-85f2-8761-87f34a4a4605, 'name': SearchDatastore_Task, 'duration_secs': 0.00998} completed successfully. 
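The recurring "Inventory has not changed for provider 92e8f362-…" records repeat the same resource-provider inventory each time the resource tracker reports to Placement. As a quick sanity check of what that inventory allows, the standard Placement capacity formula is (total - reserved) * allocation_ratio per resource class; a small sketch using the numbers from the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 156},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print("%s: schedulable capacity %.0f (at most %s per single allocation)"
              % (rc, capacity, inv['max_unit']))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400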
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 529.021134] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.021398] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 529.021665] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bffc3528-747f-4439-9359-a7a47b00416f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.032388] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 529.032388] env[62204]: value = "task-1199368" [ 529.032388] env[62204]: _type = "Task" [ 529.032388] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.042028] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199368, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.069590] env[62204]: DEBUG nova.network.neutron [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.127958] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.908s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.128632] env[62204]: ERROR nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b, please check neutron logs for more information. 
[ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Traceback (most recent call last): [ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self.driver.spawn(context, instance, image_meta, [ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self._vmops.spawn(context, instance, image_meta, injected_files, [ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] vm_ref = self.build_virtual_machine(instance, [ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] vif_infos = vmwarevif.get_vif_info(self._session, [ 529.128632] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] for vif in network_info: [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] return self._sync_wrapper(fn, *args, **kwargs) [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self.wait() [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self[:] = self._gt.wait() [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] return self._exit_event.wait() [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] result = hub.switch() [ 529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
529.128991] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] return self.greenlet.switch() [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] result = function(*args, **kwargs) [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] return func(*args, **kwargs) [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] raise e [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] nwinfo = self.network_api.allocate_for_instance( [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] created_port_ids = self._update_ports_for_instance( [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] with excutils.save_and_reraise_exception(): [ 529.129432] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] self.force_reraise() [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] raise self.value [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] updated_port = self._update_port( [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] _ensure_no_port_binding_failure(port) [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] raise exception.PortBindingFailed(port_id=port['id']) [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] nova.exception.PortBindingFailed: Binding failed for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b, please check neutron logs for more information. [ 529.129795] env[62204]: ERROR nova.compute.manager [instance: 319eed10-3985-45c3-b864-7c984a8b9819] [ 529.130140] env[62204]: DEBUG nova.compute.utils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Binding failed for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 529.131937] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Build of instance 319eed10-3985-45c3-b864-7c984a8b9819 was re-scheduled: Binding failed for port f6576648-ea3e-40e8-b6df-1fb33c94bc0b, please check neutron logs for more information. {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 529.131937] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 529.131937] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "refresh_cache-319eed10-3985-45c3-b864-7c984a8b9819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.132135] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquired lock "refresh_cache-319eed10-3985-45c3-b864-7c984a8b9819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.133581] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 529.134252] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.763s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.136111] env[62204]: INFO nova.compute.claims [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 
tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 529.139950] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Releasing lock "refresh_cache-b0365735-8e34-4129-93c0-d7c8e79fc5b2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 529.139950] env[62204]: DEBUG nova.compute.manager [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 529.140218] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 529.141624] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ae82da4-7750-4306-ad6f-914fb7bd6b50 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.157123] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3156511-1a8d-4aa8-92d3-0a9ac2386aba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.183886] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b0365735-8e34-4129-93c0-d7c8e79fc5b2 could not be found. [ 529.183886] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 529.183886] env[62204]: INFO nova.compute.manager [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 529.185169] env[62204]: DEBUG oslo.service.loopingcall [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 529.185169] env[62204]: DEBUG nova.compute.manager [-] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 529.185169] env[62204]: DEBUG nova.network.neutron [-] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 529.239162] env[62204]: DEBUG nova.network.neutron [-] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 529.338751] env[62204]: ERROR nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf, please check neutron logs for more information. [ 529.338751] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 529.338751] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.338751] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 529.338751] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.338751] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 529.338751] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.338751] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 529.338751] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.338751] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 529.338751] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.338751] env[62204]: ERROR nova.compute.manager raise self.value [ 529.338751] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.338751] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 529.338751] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.338751] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 529.339291] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.339291] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 529.339291] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf, please check neutron logs for more information. 
[ 529.339291] env[62204]: ERROR nova.compute.manager [ 529.339291] env[62204]: Traceback (most recent call last): [ 529.339291] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 529.339291] env[62204]: listener.cb(fileno) [ 529.339291] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.339291] env[62204]: result = function(*args, **kwargs) [ 529.339291] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.339291] env[62204]: return func(*args, **kwargs) [ 529.339291] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.339291] env[62204]: raise e [ 529.339291] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.339291] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 529.339291] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.339291] env[62204]: created_port_ids = self._update_ports_for_instance( [ 529.339291] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.339291] env[62204]: with excutils.save_and_reraise_exception(): [ 529.339291] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.339291] env[62204]: self.force_reraise() [ 529.339291] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.339291] env[62204]: raise self.value [ 529.339291] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.339291] env[62204]: updated_port = self._update_port( [ 529.339291] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.339291] env[62204]: _ensure_no_port_binding_failure(port) [ 529.339291] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.339291] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 529.340086] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf, please check neutron logs for more information. [ 529.340086] env[62204]: Removing descriptor: 16 [ 529.340086] env[62204]: ERROR nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf, please check neutron logs for more information. 
[ 529.340086] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Traceback (most recent call last): [ 529.340086] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 529.340086] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] yield resources [ 529.340086] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 529.340086] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self.driver.spawn(context, instance, image_meta, [ 529.340086] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 529.340086] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 529.340086] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 529.340086] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] vm_ref = self.build_virtual_machine(instance, [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] vif_infos = vmwarevif.get_vif_info(self._session, [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] for vif in network_info: [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] return self._sync_wrapper(fn, *args, **kwargs) [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self.wait() [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self[:] = self._gt.wait() [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] return self._exit_event.wait() [ 529.340495] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 529.340889] env[62204]: ERROR 
nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] result = hub.switch() [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] return self.greenlet.switch() [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] result = function(*args, **kwargs) [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] return func(*args, **kwargs) [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] raise e [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] nwinfo = self.network_api.allocate_for_instance( [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.340889] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] created_port_ids = self._update_ports_for_instance( [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] with excutils.save_and_reraise_exception(): [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self.force_reraise() [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] raise self.value [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] updated_port = self._update_port( [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.341244] 
env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] _ensure_no_port_binding_failure(port) [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.341244] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] raise exception.PortBindingFailed(port_id=port['id']) [ 529.341547] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] nova.exception.PortBindingFailed: Binding failed for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf, please check neutron logs for more information. [ 529.341547] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] [ 529.341547] env[62204]: INFO nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Terminating instance [ 529.345507] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "refresh_cache-e0a58fc7-7929-4e18-8cc9-1e5074123f06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.345671] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquired lock "refresh_cache-e0a58fc7-7929-4e18-8cc9-1e5074123f06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.345830] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 529.544905] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199368, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.576404] env[62204]: INFO nova.compute.manager [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] Took 1.05 seconds to deallocate network for instance. [ 529.664214] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 529.746129] env[62204]: DEBUG nova.network.neutron [-] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.778293] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Acquiring lock "6c675e27-0de4-46bc-8017-5ee43e2efa5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.778533] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Lock "6c675e27-0de4-46bc-8017-5ee43e2efa5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.781887] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 529.872324] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.016719] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.049515] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199368, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514738} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.049721] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 530.049989] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 530.050247] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64ba61a9-d92c-4813-af6c-16e456015bf2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.059554] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 530.059554] env[62204]: value = "task-1199369" [ 530.059554] env[62204]: _type = "Task" [ 530.059554] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.069066] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199369, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.249617] env[62204]: INFO nova.compute.manager [-] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] Took 1.06 seconds to deallocate network for instance. [ 530.285363] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Releasing lock "refresh_cache-319eed10-3985-45c3-b864-7c984a8b9819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.285592] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 530.287270] env[62204]: DEBUG nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 530.288495] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 530.314712] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.496472] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd9f538-4ee9-4ae2-b769-016283bb5e72 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.508107] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8dc1609-44b2-4f33-aa5c-026ac14467db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.549334] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Releasing lock "refresh_cache-e0a58fc7-7929-4e18-8cc9-1e5074123f06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.549334] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 530.549334] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 530.549334] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5597ac06-6416-421b-b5d9-32369457a752 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.550627] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e52572-7b16-41f3-a3ef-fd4e22ba3cea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.566142] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e405a9bd-5cda-40df-a71b-500493e55d69 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.573836] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daca386b-c033-446b-8ae4-1d2242f139fa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.614731] env[62204]: DEBUG nova.compute.provider_tree [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 530.619209] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e0a58fc7-7929-4e18-8cc9-1e5074123f06 could not be found. [ 530.619469] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 530.619664] env[62204]: INFO nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Took 0.07 seconds to destroy the instance on the hypervisor. [ 530.619933] env[62204]: DEBUG oslo.service.loopingcall [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 530.621905] env[62204]: DEBUG nova.compute.manager [-] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 530.622037] env[62204]: DEBUG nova.network.neutron [-] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 530.628529] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066081} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.629362] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 530.630262] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718393d2-6fcd-4f50-92c9-0cd436a8de64 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.642351] env[62204]: DEBUG nova.compute.manager [req-f9b8c0ab-acfd-44e4-b47f-6fcd0d2c8419 req-4d2affa4-05cb-4cb4-9f7d-754cbef3e8fa service nova] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Received event network-changed-339cf60f-af71-4eaa-8b7e-b1434526bbcf {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 530.642351] env[62204]: DEBUG nova.compute.manager [req-f9b8c0ab-acfd-44e4-b47f-6fcd0d2c8419 req-4d2affa4-05cb-4cb4-9f7d-754cbef3e8fa service nova] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Refreshing instance network info cache due to event network-changed-339cf60f-af71-4eaa-8b7e-b1434526bbcf. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 530.642351] env[62204]: DEBUG oslo_concurrency.lockutils [req-f9b8c0ab-acfd-44e4-b47f-6fcd0d2c8419 req-4d2affa4-05cb-4cb4-9f7d-754cbef3e8fa service nova] Acquiring lock "refresh_cache-e0a58fc7-7929-4e18-8cc9-1e5074123f06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.642351] env[62204]: DEBUG oslo_concurrency.lockutils [req-f9b8c0ab-acfd-44e4-b47f-6fcd0d2c8419 req-4d2affa4-05cb-4cb4-9f7d-754cbef3e8fa service nova] Acquired lock "refresh_cache-e0a58fc7-7929-4e18-8cc9-1e5074123f06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.642351] env[62204]: DEBUG nova.network.neutron [req-f9b8c0ab-acfd-44e4-b47f-6fcd0d2c8419 req-4d2affa4-05cb-4cb4-9f7d-754cbef3e8fa service nova] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Refreshing network info cache for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 530.665302] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 530.667084] env[62204]: INFO nova.scheduler.client.report [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Deleted allocations for instance 8b6abe21-275f-474d-801d-b94627e8e832 [ 530.675766] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d40ca112-4091-4f68-a8ff-bbf2a347aa4f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.707450] env[62204]: DEBUG nova.network.neutron [-] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.718728] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 530.718728] env[62204]: value = "task-1199370" [ 530.718728] env[62204]: _type = "Task" [ 530.718728] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.734099] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199370, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.823470] env[62204]: DEBUG nova.network.neutron [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.123200] env[62204]: DEBUG nova.scheduler.client.report [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 531.165055] env[62204]: DEBUG nova.network.neutron [req-f9b8c0ab-acfd-44e4-b47f-6fcd0d2c8419 req-4d2affa4-05cb-4cb4-9f7d-754cbef3e8fa service nova] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.176713] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5086bae4-73f7-41f9-8777-99777b4ee09a tempest-ServerExternalEventsTest-1104791498 tempest-ServerExternalEventsTest-1104791498-project-member] Lock "8b6abe21-275f-474d-801d-b94627e8e832" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.618s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.180162] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "8b6abe21-275f-474d-801d-b94627e8e832" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.290s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.180999] env[62204]: INFO nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 8b6abe21-275f-474d-801d-b94627e8e832] During sync_power_state the instance has a pending task (spawning). Skip. 
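The recurring "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" triplets in this trace (the compute_resources, refresh_cache-* and per-instance locks above) are emitted by oslo.concurrency's lockutils wrappers around each critical section. A minimal sketch of the same pattern with the public lockutils API, using illustrative lock names and functions rather than Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: all callers sharing the lock name are serialized.
    # This corresponds to the "inner ... lockutils.py" DEBUG lines.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # mutate shared resource-tracker state here
        return instance_uuid

    # Context-manager form, matching the "lock ... lockutils.py" lines
    # used for the refresh_cache-<uuid> locks.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache

The waited/held durations in the log are measured and logged by these wrappers themselves, which is why long hold times (for example the 1.908s and 2.494s compute_resources holds) show up without any extra instrumentation.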
[ 531.180999] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "8b6abe21-275f-474d-801d-b94627e8e832" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.213469] env[62204]: DEBUG nova.network.neutron [-] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.230448] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199370, 'name': ReconfigVM_Task, 'duration_secs': 0.288311} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.230816] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 531.231390] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ee77cc5-f45f-4c98-b852-0415ec119bbf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.240975] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 531.240975] env[62204]: value = "task-1199371" [ 531.240975] env[62204]: _type = "Task" [ 531.240975] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.251675] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199371, 'name': Rename_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.302087] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5b37cda-c0b7-4100-bea4-fc47bc9456a1 tempest-ServersListShow296Test-1648664135 tempest-ServersListShow296Test-1648664135-project-member] Lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.545s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.304351] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.413s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.304460] env[62204]: INFO nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: b0365735-8e34-4129-93c0-d7c8e79fc5b2] During sync_power_state the instance has a pending task (deleting). Skip. [ 531.305092] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "b0365735-8e34-4129-93c0-d7c8e79fc5b2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.322926] env[62204]: DEBUG nova.network.neutron [req-f9b8c0ab-acfd-44e4-b47f-6fcd0d2c8419 req-4d2affa4-05cb-4cb4-9f7d-754cbef3e8fa service nova] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.330218] env[62204]: INFO nova.compute.manager [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] Took 1.04 seconds to deallocate network for instance. 
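Every vSphere operation in this trace (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same shape: the call returns a task handle, and oslo.vmware's wait_for_task then polls it, logging the "progress is N%" lines until the task reaches a terminal state. The loop below is a conceptual sketch of that polling, not the oslo.vmware implementation; the task-info callable and its attributes are assumed stand-ins:

    import logging
    import time

    LOG = logging.getLogger(__name__)


    class TaskFailed(Exception):
        pass


    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a task until it succeeds or errors.

        get_task_info is assumed to return an object with .state
        ('running', 'success' or 'error'), .progress and .error_msg.
        """
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailed(info.error_msg)
            # Mirrors the "progress is N%" DEBUG lines above.
            LOG.debug('progress is %d%%', info.progress or 0)
            time.sleep(poll_interval)

In the log itself this loop lives in oslo_vmware/api.py (_poll_task), and the "completed successfully" entries with duration_secs are logged once the terminal state is reached.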
[ 531.345383] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Acquiring lock "4a2b5dbe-ed48-40b6-ba72-a06b14e31696" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.345383] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Lock "4a2b5dbe-ed48-40b6-ba72-a06b14e31696" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.628571] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 531.631833] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 531.636879] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.768s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 531.640161] env[62204]: INFO nova.compute.claims [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 531.682088] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 531.722544] env[62204]: INFO nova.compute.manager [-] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Took 1.10 seconds to deallocate network for instance. 
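For spotting contention such as the 14.768 s wait on "compute_resources" recorded above, a small stand-alone helper (not part of Nova) can total the waited/held durations straight from records in this format:

    # Illustrative log-analysis snippet: sum lock wait/held durations from
    # oslo.concurrency records shaped like the ones in this log.
    import re
    from collections import defaultdict

    WAITED = re.compile(r'Lock "(?P<name>[^"]+)" acquired by .*? waited (?P<s>[\d.]+)s')
    HELD = re.compile(r'Lock "(?P<name>[^"]+)" "released" by .*? held (?P<s>[\d.]+)s')


    def lock_stats(lines):
        stats = defaultdict(lambda: {'waited': 0.0, 'held': 0.0})
        for line in lines:
            for pattern, key in ((WAITED, 'waited'), (HELD, 'held')):
                m = pattern.search(line)
                if m:
                    stats[m.group('name')][key] += float(m.group('s'))
        return dict(stats)


    # Example: lock_stats(open('nova-compute.log')) would surface the
    # 14.768 s wait on "compute_resources" seen above.
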
[ 531.728814] env[62204]: DEBUG nova.compute.claims [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 531.729878] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.753185] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199371, 'name': Rename_Task, 'duration_secs': 0.155051} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.753446] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 531.753770] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c54bb95e-08e2-4f69-b62d-988143587728 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.762304] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 531.762304] env[62204]: value = "task-1199372" [ 531.762304] env[62204]: _type = "Task" [ 531.762304] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.773938] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199372, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.828980] env[62204]: DEBUG oslo_concurrency.lockutils [req-f9b8c0ab-acfd-44e4-b47f-6fcd0d2c8419 req-4d2affa4-05cb-4cb4-9f7d-754cbef3e8fa service nova] Releasing lock "refresh_cache-e0a58fc7-7929-4e18-8cc9-1e5074123f06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.147660] env[62204]: DEBUG nova.compute.utils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 532.149901] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 532.150157] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 532.213890] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.256070] env[62204]: DEBUG nova.policy [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0f3d190e13254332aa1ecd1b6d68eb52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38d8f190ca31409ea30e8508e62bb073', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 532.277566] env[62204]: DEBUG oslo_vmware.api [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199372, 'name': PowerOnVM_Task, 'duration_secs': 0.473586} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.278167] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 532.278167] env[62204]: INFO nova.compute.manager [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Took 9.11 seconds to spawn the instance on the hypervisor. 
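The Rename_Task and PowerOnVM_Task records above follow the usual oslo.vmware pattern: invoke the vSphere method, which returns a task moref immediately, then block on the session's task poller, which emits the "progress is N%" and "completed successfully" lines. A sketch against the public oslo.vmware API; the vCenter host, credentials, retry count and poll interval below are placeholders, not the values from this deployment:

    from oslo_vmware import api


    def power_on(session, vm_ref):
        # Invoke the vSphere call; it returns a Task moref right away.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Poll the task to completion; this is what produces the
        # "progress is N%" / "completed successfully" records above.
        return session.wait_for_task(task)


    # Placeholder connection details for the sketch only.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
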
[ 532.278318] env[62204]: DEBUG nova.compute.manager [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 532.279660] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8839c6de-9379-4527-aee4-28bd2a7dce27 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.380450] env[62204]: INFO nova.scheduler.client.report [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Deleted allocations for instance 319eed10-3985-45c3-b864-7c984a8b9819 [ 532.652723] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 532.805344] env[62204]: INFO nova.compute.manager [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Took 19.85 seconds to build instance. [ 532.891026] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d630e941-cc8c-4b9a-9f3a-3fe56f1bf92d tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "319eed10-3985-45c3-b864-7c984a8b9819" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.705s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.894266] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "319eed10-3985-45c3-b864-7c984a8b9819" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.003s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.894266] env[62204]: INFO nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 319eed10-3985-45c3-b864-7c984a8b9819] During sync_power_state the instance has a pending task (spawning). Skip. 
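The "Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972" records a few lines below report raw totals plus allocation ratios; the schedulable capacity is effectively (total - reserved) * allocation_ratio, so this node advertises 192 VCPUs from 48 physical ones. A short worked example using the values from that inventory dict:

    # Worked example (illustration only) based on the inventory logged below
    # for provider 92e8f362-5134-40c6-9a5c-0b8f64197972.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }


    def capacity(inv):
        # Schedulable capacity = (total - reserved) * allocation_ratio.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}


    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
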
[ 532.894266] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "319eed10-3985-45c3-b864-7c984a8b9819" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.957188] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e515d57-c792-43ea-a40e-3ddea1b91165 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.966493] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b925933-c71f-472a-9ffe-107ec5c42e93 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.012105] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c4df6b-4eed-44c2-a397-184450c3add3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.019521] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0521a4-9881-4c68-83c7-50ff0cd7bd3d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.035870] env[62204]: DEBUG nova.compute.provider_tree [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 533.179271] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Successfully created port: 423ba96a-ddc9-4e32-a315-359fa67c151f {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 533.238438] env[62204]: DEBUG nova.compute.manager [req-91912a5e-0bf3-4002-8a10-c75472cf93b8 req-30b03110-e8b3-4541-9ce2-62b127af5e6e service nova] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Received event network-vif-deleted-339cf60f-af71-4eaa-8b7e-b1434526bbcf {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 533.309798] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6aff028a-444e-44be-bba3-9f887d81f639 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "4644dfab-0758-43e6-bbcc-9930f086a4e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.363s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.312209] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "4644dfab-0758-43e6-bbcc-9930f086a4e5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.420s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.312209] env[62204]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe0054a-b749-46a2-9e0e-e20b6fbfe1ef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.396732] env[62204]: DEBUG nova.compute.manager [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 533.538969] env[62204]: DEBUG nova.scheduler.client.report [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 533.680796] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 533.718119] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 533.719180] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 533.719180] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 533.719180] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 
tempest-ServersAdminTestJSON-426882600-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 533.719400] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 533.719431] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 533.719673] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 533.719842] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 533.719999] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 533.720166] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 533.720559] env[62204]: DEBUG nova.virt.hardware [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 533.722759] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9d943f-5937-4b09-bcaf-102a39d980b7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.731633] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7093198-9307-4feb-aff8-299a6c19818b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.750595] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Acquiring lock "571b574b-27f2-4e95-9309-fd3097fb4f64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.750825] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Lock "571b574b-27f2-4e95-9309-fd3097fb4f64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.817434] env[62204]: DEBUG nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 533.825359] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "4644dfab-0758-43e6-bbcc-9930f086a4e5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.514s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.922550] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.045401] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.409s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 534.045914] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 534.049401] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.334s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.226252] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Acquiring lock "72514005-1023-4db6-9e51-9b0855083411" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.226493] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Lock "72514005-1023-4db6-9e51-9b0855083411" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.226832] env[62204]: INFO nova.compute.manager [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Rebuilding instance [ 534.297245] env[62204]: DEBUG nova.compute.manager [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 534.297245] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a580592c-74d0-4d0a-a1d6-13217876817a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.350623] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.553501] env[62204]: DEBUG nova.compute.utils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 534.553501] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 534.553501] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 534.683142] env[62204]: DEBUG nova.policy [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8133737e2af747228450921923b862c5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '630540a1758a446bb210f4cad17ab781', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 534.812528] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 534.812620] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-711a9a4b-3e02-4a22-a218-1b0edc70dae0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.824767] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 534.824767] env[62204]: value = "task-1199373" [ 534.824767] env[62204]: _type = "Task" [ 534.824767] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.833270] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199373, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.873444] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511514d7-484b-479f-839b-31558ae113cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.882219] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f710fbd-2caf-41c1-99ce-365254cf963b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.917550] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85244c2-2f0a-4d43-9f10-ab9fa4be2989 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.924732] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd10a0d0-ba1f-4be0-bb7d-d0a23658d9c9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.942376] env[62204]: DEBUG nova.compute.provider_tree [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 534.953571] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Acquiring lock "47409cd0-db33-4a94-b806-1799a6f7e98f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.953904] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Lock "47409cd0-db33-4a94-b806-1799a6f7e98f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.059327] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 535.337125] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199373, 'name': PowerOffVM_Task, 'duration_secs': 0.135663} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.339342] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 535.339342] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 535.339342] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa175dbd-a329-4d45-aec6-376fdddc5910 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.346178] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 535.346427] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8e591a5-90cd-49d8-9139-4e3988e3b0ef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.370663] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 535.370890] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 535.371517] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleting the datastore file [datastore1] 4644dfab-0758-43e6-bbcc-9930f086a4e5 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 535.371517] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-49a3d673-2a2c-4fc2-add5-05a1435abe2d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.377740] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 535.377740] env[62204]: value = "task-1199375" [ 535.377740] env[62204]: _type = "Task" [ 535.377740] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 535.385465] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199375, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 535.450994] env[62204]: DEBUG nova.scheduler.client.report [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.687879] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Successfully created port: e854ad35-bc66-487f-aa75-3e582f8b74c2 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 535.889543] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107753} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.889814] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 535.889989] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 535.890169] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 535.958370] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.907s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.958370] env[62204]: ERROR nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c, please check neutron logs for more information. 
[ 535.958370] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Traceback (most recent call last): [ 535.958370] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 535.958370] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self.driver.spawn(context, instance, image_meta, [ 535.958370] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 535.958370] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self._vmops.spawn(context, instance, image_meta, injected_files, [ 535.958370] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 535.958370] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] vm_ref = self.build_virtual_machine(instance, [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] vif_infos = vmwarevif.get_vif_info(self._session, [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] for vif in network_info: [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] return self._sync_wrapper(fn, *args, **kwargs) [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self.wait() [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self[:] = self._gt.wait() [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] return self._exit_event.wait() [ 535.958649] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] result = hub.switch() [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] return self.greenlet.switch() [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] result = function(*args, **kwargs) [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] return func(*args, **kwargs) [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] raise e [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] nwinfo = self.network_api.allocate_for_instance( [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 535.959036] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] created_port_ids = self._update_ports_for_instance( [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] with excutils.save_and_reraise_exception(): [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] self.force_reraise() [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] raise self.value [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] updated_port = self._update_port( [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] _ensure_no_port_binding_failure(port) [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 535.959390] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] raise exception.PortBindingFailed(port_id=port['id']) [ 535.959730] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] nova.exception.PortBindingFailed: Binding failed for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c, please check neutron logs for more information. [ 535.959730] env[62204]: ERROR nova.compute.manager [instance: fdb9ae1a-a561-475c-9e13-803765c21582] [ 535.959730] env[62204]: DEBUG nova.compute.utils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Binding failed for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 535.959730] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.716s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.962064] env[62204]: INFO nova.compute.claims [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 535.965066] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Build of instance fdb9ae1a-a561-475c-9e13-803765c21582 was re-scheduled: Binding failed for port 30ef29f8-f42b-42b9-b1e1-f1e95af0982c, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 535.965438] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 535.965676] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Acquiring lock "refresh_cache-fdb9ae1a-a561-475c-9e13-803765c21582" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 535.965851] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Acquired lock "refresh_cache-fdb9ae1a-a561-475c-9e13-803765c21582" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.966046] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 536.071271] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 536.120349] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 536.120617] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 536.120796] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 536.120991] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 536.121380] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 536.121512] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 536.121714] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 536.121860] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 536.122262] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 
tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 536.122262] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 536.122533] env[62204]: DEBUG nova.virt.hardware [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 536.123927] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78eb4f64-a0fe-4c55-8fe3-e976cc734ac1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.136212] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741c9ee3-9774-4588-a634-5588431b4bbb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.378735] env[62204]: DEBUG nova.compute.manager [req-677eae9c-aec2-4339-a244-c36484708fc8 req-0ada11cf-4aa8-4609-a1df-e8fbfe9a8b90 service nova] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Received event network-changed-423ba96a-ddc9-4e32-a315-359fa67c151f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 536.378940] env[62204]: DEBUG nova.compute.manager [req-677eae9c-aec2-4339-a244-c36484708fc8 req-0ada11cf-4aa8-4609-a1df-e8fbfe9a8b90 service nova] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Refreshing instance network info cache due to event network-changed-423ba96a-ddc9-4e32-a315-359fa67c151f. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 536.379194] env[62204]: DEBUG oslo_concurrency.lockutils [req-677eae9c-aec2-4339-a244-c36484708fc8 req-0ada11cf-4aa8-4609-a1df-e8fbfe9a8b90 service nova] Acquiring lock "refresh_cache-62721b0d-0763-43ae-b221-271266bf8794" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.379302] env[62204]: DEBUG oslo_concurrency.lockutils [req-677eae9c-aec2-4339-a244-c36484708fc8 req-0ada11cf-4aa8-4609-a1df-e8fbfe9a8b90 service nova] Acquired lock "refresh_cache-62721b0d-0763-43ae-b221-271266bf8794" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 536.379456] env[62204]: DEBUG nova.network.neutron [req-677eae9c-aec2-4339-a244-c36484708fc8 req-0ada11cf-4aa8-4609-a1df-e8fbfe9a8b90 service nova] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Refreshing network info cache for port 423ba96a-ddc9-4e32-a315-359fa67c151f {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 536.558472] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 536.824888] env[62204]: ERROR nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 423ba96a-ddc9-4e32-a315-359fa67c151f, please check neutron logs for more information. [ 536.824888] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 536.824888] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.824888] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 536.824888] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.824888] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 536.824888] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.824888] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 536.824888] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.824888] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 536.824888] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.824888] env[62204]: ERROR nova.compute.manager raise self.value [ 536.824888] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.824888] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 536.824888] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.824888] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 536.825555] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.825555] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 536.825555] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 423ba96a-ddc9-4e32-a315-359fa67c151f, please check neutron logs for more information. 
[ 536.825555] env[62204]: ERROR nova.compute.manager [ 536.825555] env[62204]: Traceback (most recent call last): [ 536.825555] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 536.825555] env[62204]: listener.cb(fileno) [ 536.825555] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.825555] env[62204]: result = function(*args, **kwargs) [ 536.825555] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.825555] env[62204]: return func(*args, **kwargs) [ 536.825555] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.825555] env[62204]: raise e [ 536.825555] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.825555] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 536.825555] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.825555] env[62204]: created_port_ids = self._update_ports_for_instance( [ 536.825555] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.825555] env[62204]: with excutils.save_and_reraise_exception(): [ 536.825555] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.825555] env[62204]: self.force_reraise() [ 536.825555] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.825555] env[62204]: raise self.value [ 536.825555] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.825555] env[62204]: updated_port = self._update_port( [ 536.825555] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.825555] env[62204]: _ensure_no_port_binding_failure(port) [ 536.825555] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.825555] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 536.826340] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 423ba96a-ddc9-4e32-a315-359fa67c151f, please check neutron logs for more information. [ 536.826340] env[62204]: Removing descriptor: 16 [ 536.826762] env[62204]: ERROR nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 423ba96a-ddc9-4e32-a315-359fa67c151f, please check neutron logs for more information. 
[ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] Traceback (most recent call last): [ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] yield resources [ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self.driver.spawn(context, instance, image_meta, [ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self._vmops.spawn(context, instance, image_meta, injected_files, [ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] vm_ref = self.build_virtual_machine(instance, [ 536.826762] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] vif_infos = vmwarevif.get_vif_info(self._session, [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] for vif in network_info: [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] return self._sync_wrapper(fn, *args, **kwargs) [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self.wait() [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self[:] = self._gt.wait() [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] return self._exit_event.wait() [ 536.827086] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 536.827086] env[62204]: ERROR 
nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] result = hub.switch() [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] return self.greenlet.switch() [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] result = function(*args, **kwargs) [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] return func(*args, **kwargs) [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] raise e [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] nwinfo = self.network_api.allocate_for_instance( [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] created_port_ids = self._update_ports_for_instance( [ 536.827435] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] with excutils.save_and_reraise_exception(): [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self.force_reraise() [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] raise self.value [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] updated_port = self._update_port( [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 536.827783] 
env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] _ensure_no_port_binding_failure(port) [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] raise exception.PortBindingFailed(port_id=port['id']) [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] nova.exception.PortBindingFailed: Binding failed for port 423ba96a-ddc9-4e32-a315-359fa67c151f, please check neutron logs for more information. [ 536.827783] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] [ 536.828131] env[62204]: INFO nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Terminating instance [ 536.831052] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "refresh_cache-62721b0d-0763-43ae-b221-271266bf8794" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 536.934402] env[62204]: DEBUG nova.network.neutron [req-677eae9c-aec2-4339-a244-c36484708fc8 req-0ada11cf-4aa8-4609-a1df-e8fbfe9a8b90 service nova] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 536.938557] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 536.939883] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 536.940230] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 536.940603] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 
tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 536.940965] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 536.941013] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 536.941342] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 536.941628] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 536.941849] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 536.942060] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 536.942277] env[62204]: DEBUG nova.virt.hardware [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 536.943554] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df31d638-ef4c-4193-b872-418a472ce4dc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.954180] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af4e768-b48b-4348-ab9c-5e5349a7476f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.977286] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 536.984537] env[62204]: DEBUG oslo.service.loopingcall [None 
req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 536.985895] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.986827] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 536.987060] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8141a2f4-7888-42a2-9215-adff23c1b3bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.002622] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Releasing lock "refresh_cache-fdb9ae1a-a561-475c-9e13-803765c21582" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.002906] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 537.003103] env[62204]: DEBUG nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 537.003593] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 537.013506] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 537.013506] env[62204]: value = "task-1199376" [ 537.013506] env[62204]: _type = "Task" [ 537.013506] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.022738] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199376, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.062307] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 537.313954] env[62204]: DEBUG nova.network.neutron [req-677eae9c-aec2-4339-a244-c36484708fc8 req-0ada11cf-4aa8-4609-a1df-e8fbfe9a8b90 service nova] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.351251] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b68e07a-8ba0-45ee-87f6-6b1920ef9026 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.361554] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c977dad3-4449-421b-ae9d-ae56322f6742 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.399857] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a299273f-c2df-4048-94c8-333a6d767f75 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.408881] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad9a67f-2cd2-433b-99a7-f7d0c0f8352e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.425428] env[62204]: DEBUG nova.compute.provider_tree [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.524094] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199376, 'name': CreateVM_Task, 'duration_secs': 0.273145} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 537.525238] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 537.525238] env[62204]: DEBUG oslo_vmware.service [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad16653-92ef-4da2-ac92-15f808f1ac3c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.531105] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.531275] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.531944] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 537.532486] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f16e39e2-dd79-453c-946a-1ebac23c6fa1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 537.538367] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 537.538367] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f9a20a-205b-b97f-33a9-53abe6d46609" [ 537.538367] env[62204]: _type = "Task" [ 537.538367] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 537.548106] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f9a20a-205b-b97f-33a9-53abe6d46609, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 537.575035] env[62204]: DEBUG nova.network.neutron [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.819375] env[62204]: DEBUG oslo_concurrency.lockutils [req-677eae9c-aec2-4339-a244-c36484708fc8 req-0ada11cf-4aa8-4609-a1df-e8fbfe9a8b90 service nova] Releasing lock "refresh_cache-62721b0d-0763-43ae-b221-271266bf8794" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 537.819802] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquired lock "refresh_cache-62721b0d-0763-43ae-b221-271266bf8794" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.819990] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 537.927863] env[62204]: DEBUG nova.scheduler.client.report [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 538.047344] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 538.047625] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 538.048043] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
538.048043] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.048204] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 538.048432] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ece88033-56bd-4b9c-8e35-eddd169bd657 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.066696] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 538.066891] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 538.067707] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d80a49c-01cc-4613-9c57-a4002b4336a5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.077207] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8967ff5-9f7b-4b3e-822f-1fc5be7fb604 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.081982] env[62204]: INFO nova.compute.manager [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] Took 1.08 seconds to deallocate network for instance. [ 538.088684] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 538.088684] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520142fd-d1a8-8a3c-4ba5-b589c5e75e05" [ 538.088684] env[62204]: _type = "Task" [ 538.088684] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 538.099147] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520142fd-d1a8-8a3c-4ba5-b589c5e75e05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 538.360917] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.435022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.437927] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 538.441967] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.044s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.441967] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 538.442103] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62204) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 538.442410] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.278s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.448173] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f2571d-66f0-40ec-9940-ae8627326e66 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.463489] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5de6eb2-a018-450b-aab1-0b9c44de9bc4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.484626] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c78a89-95d8-4bb7-84fd-612005d33360 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.494952] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b1be72-fc5a-4c2f-a1c4-d6748791e80a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.527704] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181445MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62204) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 538.527986] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.532662] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.602191] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Preparing fetch location {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 538.602368] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Creating directory with path [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 538.609089] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d10aceff-cdb1-4735-a366-8f521a1bcf4c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.629281] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Created directory with path [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 538.629465] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Fetch image to [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 538.629587] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 
4644dfab-0758-43e6-bbcc-9930f086a4e5] Downloading image file data c0e4d3a1-f965-49e2-ab05-fbf425872dcc to [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk on the data store datastore2 {{(pid=62204) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 538.630428] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d334ddbf-497b-4b85-837d-148909a94ec3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.640989] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae7da49-cb22-4acb-99a2-d91332d9fb0c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.654191] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32456326-5eb9-40d9-ac70-83fbd838bbd7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.694523] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64744dc-9477-47cc-b527-cda445f18123 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.698368] env[62204]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b7ff19e6-be5a-4ca7-9c79-c807334e6770 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.729665] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Downloading image file data c0e4d3a1-f965-49e2-ab05-fbf425872dcc to the data store datastore2 {{(pid=62204) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 538.811701] env[62204]: DEBUG oslo_vmware.rw_handles [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62204) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 538.910037] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "d52bfb49-beb0-4bfe-b3bb-45132c210065" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.910949] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "d52bfb49-beb0-4bfe-b3bb-45132c210065" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.950798] env[62204]: DEBUG nova.compute.utils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 538.953008] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 538.953242] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 539.035679] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Releasing lock "refresh_cache-62721b0d-0763-43ae-b221-271266bf8794" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.036013] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 539.036323] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 539.040621] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e306d7a-6f0f-49ec-961a-d8da391f8b1d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.056214] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a704e0d-165d-4a39-ba61-61008d6c1547 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.088965] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 62721b0d-0763-43ae-b221-271266bf8794 could not be found. [ 539.090031] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 539.090031] env[62204]: INFO nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Took 0.05 seconds to destroy the instance on the hypervisor. [ 539.090031] env[62204]: DEBUG oslo.service.loopingcall [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.090860] env[62204]: DEBUG nova.compute.manager [-] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 539.091132] env[62204]: DEBUG nova.network.neutron [-] [instance: 62721b0d-0763-43ae-b221-271266bf8794] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 539.134514] env[62204]: INFO nova.scheduler.client.report [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Deleted allocations for instance fdb9ae1a-a561-475c-9e13-803765c21582 [ 539.164880] env[62204]: DEBUG nova.policy [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62a3822ba1f8458da3f69319712759eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2c9a16c9e034bfe940de3c83f7462b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 539.172128] env[62204]: DEBUG nova.network.neutron [-] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.409273] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83b6397-aeb8-4880-a647-f2311cec8d9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.420508] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5246a01b-689d-4c37-bb5c-636e485d0302 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.458689] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82680d4-3047-4804-ace7-1ecef68185b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.461901] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 539.471804] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b1a6b6-e418-4aee-a54e-cace73cd58a5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.490455] env[62204]: DEBUG nova.compute.provider_tree [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 539.514703] env[62204]: DEBUG oslo_vmware.rw_handles [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Completed reading data from the image iterator. {{(pid=62204) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 539.514931] env[62204]: DEBUG oslo_vmware.rw_handles [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 539.648233] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d163cd04-eaed-4c43-a383-2640dc10940c tempest-ServerRescueTestJSONUnderV235-1452801103 tempest-ServerRescueTestJSONUnderV235-1452801103-project-member] Lock "fdb9ae1a-a561-475c-9e13-803765c21582" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.665s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.653019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "fdb9ae1a-a561-475c-9e13-803765c21582" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 18.759s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.653019] env[62204]: INFO nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: fdb9ae1a-a561-475c-9e13-803765c21582] During sync_power_state the instance has a pending task (spawning). Skip. 
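The repeated "Waiting for the task: (returnval){ ... }" / "progress is N%" pairs in these records (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task) are oslo.vmware's task-polling loop: the driver invokes a vSphere method that returns a Task managed object, then blocks in wait_for_task() while _poll_task logs progress. The following is a minimal, hypothetical sketch of that pattern only; the host, credentials, and datastore paths are placeholders and the argument names are assumptions, not values taken from this log.

    # Hypothetical sketch of the wait_for_task pattern seen in the records above.
    # Connection details and disk paths are placeholders, not from this log.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',            # vCenter host (placeholder)
        'user', 'password',           # credentials (placeholders)
        api_retry_count=10,
        task_poll_interval=0.5)

    # Invoke an API call that returns a Task managed object reference ...
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',  # placeholder
        destName='[datastore2] vmware_temp/example/image.vmdk')         # placeholder

    # ... then block until it completes; oslo.vmware emits the
    # "Task: {...} progress is N%" DEBUG lines while polling.
    session.wait_for_task(task)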
[ 539.653019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "fdb9ae1a-a561-475c-9e13-803765c21582" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 539.663867] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Downloaded image file data c0e4d3a1-f965-49e2-ab05-fbf425872dcc to vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk on the data store datastore2 {{(pid=62204) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 539.666234] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Caching image {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 539.666593] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Copying Virtual Disk [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk to [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 539.667301] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f6abfcd-63fd-472c-812e-0e2c7314a892 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.676213] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 539.676213] env[62204]: value = "task-1199377" [ 539.676213] env[62204]: _type = "Task" [ 539.676213] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.679772] env[62204]: DEBUG nova.network.neutron [-] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.686967] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199377, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.928253] env[62204]: DEBUG nova.compute.manager [req-da2036e7-22d8-4682-88d8-23dcd849e116 req-73725d9a-4d97-4097-a361-e4fecc2a7a54 service nova] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Received event network-vif-deleted-423ba96a-ddc9-4e32-a315-359fa67c151f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 539.995039] env[62204]: DEBUG nova.scheduler.client.report [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 540.152453] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 540.183128] env[62204]: INFO nova.compute.manager [-] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Took 1.09 seconds to deallocate network for instance. [ 540.197767] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199377, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.197767] env[62204]: DEBUG nova.compute.claims [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 540.197767] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.409997] env[62204]: ERROR nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e854ad35-bc66-487f-aa75-3e582f8b74c2, please check neutron logs for more information. 
[ 540.409997] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 540.409997] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.409997] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 540.409997] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.409997] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 540.409997] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.409997] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 540.409997] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.409997] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 540.409997] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.409997] env[62204]: ERROR nova.compute.manager raise self.value [ 540.409997] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.409997] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 540.409997] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.409997] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 540.410517] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.410517] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 540.410517] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e854ad35-bc66-487f-aa75-3e582f8b74c2, please check neutron logs for more information. 
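The traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed once Neutron reports that binding the port's VIF failed. A rough sketch of that check, assuming the usual 'binding:vif_type' == 'binding_failed' sentinel (the file and line cited in the traceback hold the exact guard for this release):

# Rough sketch of the check that raised the PortBindingFailed above.
# Assumption: the guard keys off the port's 'binding:vif_type' being the
# Neutron "binding_failed" sentinel.
from nova import exception

VIF_TYPE_BINDING_FAILED = 'binding_failed'

def _ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        # Produces the message seen in the log: "Binding failed for port
        # <port id>, please check neutron logs for more information."
        raise exception.PortBindingFailed(port_id=port['id'])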
[ 540.410517] env[62204]: ERROR nova.compute.manager [ 540.411070] env[62204]: Traceback (most recent call last): [ 540.411171] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 540.411171] env[62204]: listener.cb(fileno) [ 540.411171] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 540.411171] env[62204]: result = function(*args, **kwargs) [ 540.411171] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 540.411171] env[62204]: return func(*args, **kwargs) [ 540.411171] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 540.411171] env[62204]: raise e [ 540.411171] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.411171] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 540.411171] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.411171] env[62204]: created_port_ids = self._update_ports_for_instance( [ 540.411171] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.411171] env[62204]: with excutils.save_and_reraise_exception(): [ 540.411171] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.411171] env[62204]: self.force_reraise() [ 540.411171] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.411171] env[62204]: raise self.value [ 540.411171] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.411171] env[62204]: updated_port = self._update_port( [ 540.411171] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.411171] env[62204]: _ensure_no_port_binding_failure(port) [ 540.411171] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.411171] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 540.411171] env[62204]: nova.exception.PortBindingFailed: Binding failed for port e854ad35-bc66-487f-aa75-3e582f8b74c2, please check neutron logs for more information. [ 540.411171] env[62204]: Removing descriptor: 14 [ 540.412457] env[62204]: ERROR nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e854ad35-bc66-487f-aa75-3e582f8b74c2, please check neutron logs for more information. 
[ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Traceback (most recent call last): [ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] yield resources [ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self.driver.spawn(context, instance, image_meta, [ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] vm_ref = self.build_virtual_machine(instance, [ 540.412457] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] vif_infos = vmwarevif.get_vif_info(self._session, [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] for vif in network_info: [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] return self._sync_wrapper(fn, *args, **kwargs) [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self.wait() [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self[:] = self._gt.wait() [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] return self._exit_event.wait() [ 540.412845] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 540.412845] env[62204]: ERROR 
nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] result = hub.switch() [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] return self.greenlet.switch() [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] result = function(*args, **kwargs) [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] return func(*args, **kwargs) [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] raise e [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] nwinfo = self.network_api.allocate_for_instance( [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] created_port_ids = self._update_ports_for_instance( [ 540.413226] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] with excutils.save_and_reraise_exception(): [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self.force_reraise() [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] raise self.value [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] updated_port = self._update_port( [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.413618] 
env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] _ensure_no_port_binding_failure(port) [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] raise exception.PortBindingFailed(port_id=port['id']) [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] nova.exception.PortBindingFailed: Binding failed for port e854ad35-bc66-487f-aa75-3e582f8b74c2, please check neutron logs for more information. [ 540.413618] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] [ 540.414059] env[62204]: INFO nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Terminating instance [ 540.415677] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Successfully created port: aee748ca-32b6-4422-8048-690ec8cdbced {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 540.417995] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Acquiring lock "refresh_cache-d7f15c60-04ac-429e-b16f-8774f9a050b8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.418666] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Acquired lock "refresh_cache-d7f15c60-04ac-429e-b16f-8774f9a050b8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.418666] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 540.478341] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 540.500348] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.058s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.500970] env[62204]: ERROR nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418, please check neutron logs for more information. [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Traceback (most recent call last): [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self.driver.spawn(context, instance, image_meta, [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self._vmops.spawn(context, instance, image_meta, injected_files, [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] vm_ref = self.build_virtual_machine(instance, [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] vif_infos = vmwarevif.get_vif_info(self._session, [ 540.500970] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] for vif in network_info: [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] return self._sync_wrapper(fn, *args, **kwargs) [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self.wait() [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 540.501366] env[62204]: 
ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self[:] = self._gt.wait() [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] return self._exit_event.wait() [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] result = hub.switch() [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 540.501366] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] return self.greenlet.switch() [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] result = function(*args, **kwargs) [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] return func(*args, **kwargs) [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] raise e [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] nwinfo = self.network_api.allocate_for_instance( [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] created_port_ids = self._update_ports_for_instance( [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] with excutils.save_and_reraise_exception(): [ 540.501808] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] self.force_reraise() [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", 
line 200, in force_reraise [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] raise self.value [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] updated_port = self._update_port( [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] _ensure_no_port_binding_failure(port) [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] raise exception.PortBindingFailed(port_id=port['id']) [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] nova.exception.PortBindingFailed: Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418, please check neutron logs for more information. [ 540.502199] env[62204]: ERROR nova.compute.manager [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] [ 540.502546] env[62204]: DEBUG nova.compute.utils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418, please check neutron logs for more information. 
{{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 540.503748] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.711s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.505858] env[62204]: INFO nova.compute.claims [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 540.514604] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 540.514604] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 540.514604] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 540.514743] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 540.514743] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 540.514743] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 540.514743] env[62204]: DEBUG 
nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 540.514743] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 540.514893] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 540.514893] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 540.514893] env[62204]: DEBUG nova.virt.hardware [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 540.518784] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Build of instance 04f9b938-7bcd-4b49-9115-0693fb478326 was re-scheduled: Binding failed for port 7ffbdb3c-f538-4c9b-bf17-0b8afdb74418, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 540.518784] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 540.518784] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Acquiring lock "refresh_cache-04f9b938-7bcd-4b49-9115-0693fb478326" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 540.518784] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Acquired lock "refresh_cache-04f9b938-7bcd-4b49-9115-0693fb478326" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 540.519359] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 540.519359] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f934d807-478e-4094-b3fd-54bbe024a73e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.528542] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95974efe-46db-4efa-b8e0-3db800da031e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.582841] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.583255] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.611489] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "292b9c31-2ea1-4b28-8b60-79c6c80e1531" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.611803] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "292b9c31-2ea1-4b28-8b60-79c6c80e1531" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.683536] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.700359] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658545} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.700359] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Copied Virtual Disk [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk to [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 540.700359] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleting the datastore file [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/tmp-sparse.vmdk {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 540.700359] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff710ea2-af4e-436a-b852-b9e2e7189849 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.706326] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 540.706326] env[62204]: value = "task-1199378" [ 540.706326] env[62204]: _type = "Task" [ 540.706326] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.716476] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199378, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.968782] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.068200] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.119385] env[62204]: DEBUG nova.compute.manager [req-67f900f1-4d2b-48bb-818e-fb246383491f req-8df78a5a-eb16-406e-a0a1-2e01490d4e00 service nova] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Received event network-changed-e854ad35-bc66-487f-aa75-3e582f8b74c2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 541.119385] env[62204]: DEBUG nova.compute.manager [req-67f900f1-4d2b-48bb-818e-fb246383491f req-8df78a5a-eb16-406e-a0a1-2e01490d4e00 service nova] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Refreshing instance network info cache due to event network-changed-e854ad35-bc66-487f-aa75-3e582f8b74c2. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 541.119385] env[62204]: DEBUG oslo_concurrency.lockutils [req-67f900f1-4d2b-48bb-818e-fb246383491f req-8df78a5a-eb16-406e-a0a1-2e01490d4e00 service nova] Acquiring lock "refresh_cache-d7f15c60-04ac-429e-b16f-8774f9a050b8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 541.174391] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.221888] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022407} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.222366] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 541.222819] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Moving file from [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147/c0e4d3a1-f965-49e2-ab05-fbf425872dcc to [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc. 
{{(pid=62204) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 541.225787] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-c4ed3cce-6d27-4408-af77-16b5076f21fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.230115] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 541.230115] env[62204]: value = "task-1199379" [ 541.230115] env[62204]: _type = "Task" [ 541.230115] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.241355] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199379, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.408018] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 541.681714] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Releasing lock "refresh_cache-d7f15c60-04ac-429e-b16f-8774f9a050b8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.682136] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 541.682333] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 541.683033] env[62204]: DEBUG oslo_concurrency.lockutils [req-67f900f1-4d2b-48bb-818e-fb246383491f req-8df78a5a-eb16-406e-a0a1-2e01490d4e00 service nova] Acquired lock "refresh_cache-d7f15c60-04ac-429e-b16f-8774f9a050b8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 541.683033] env[62204]: DEBUG nova.network.neutron [req-67f900f1-4d2b-48bb-818e-fb246383491f req-8df78a5a-eb16-406e-a0a1-2e01490d4e00 service nova] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Refreshing network info cache for port e854ad35-bc66-487f-aa75-3e582f8b74c2 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 541.684085] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0cd2dce-de9c-4e93-b341-25b1738bd0a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.695135] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937003af-d8ad-40aa-add5-3b62c82470e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.722401] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d7f15c60-04ac-429e-b16f-8774f9a050b8 could not be found. [ 541.722690] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 541.722919] env[62204]: INFO nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 541.723170] env[62204]: DEBUG oslo.service.loopingcall [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 541.723381] env[62204]: DEBUG nova.compute.manager [-] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 541.723507] env[62204]: DEBUG nova.network.neutron [-] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 541.746240] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199379, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.029399} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.746554] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] File moved {{(pid=62204) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 541.746772] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Cleaning up location [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147 {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 541.746887] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleting the datastore file [datastore2] vmware_temp/b1ba9dc0-e040-406a-8a4f-df85fbe2d147 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 541.749843] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3110cec1-6fe4-44eb-b85e-1880eaeabc52 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.756491] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 541.756491] env[62204]: value = "task-1199380" [ 541.756491] env[62204]: _type = "Task" [ 541.756491] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.774079] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199380, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.774592] env[62204]: DEBUG nova.network.neutron [-] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.910366] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Releasing lock "refresh_cache-04f9b938-7bcd-4b49-9115-0693fb478326" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 541.910475] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 541.910622] env[62204]: DEBUG nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 541.910789] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 541.929083] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb7f1d7-e359-474d-9d80-b9ee12fa97e9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.938143] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3065ce-8ef8-4c76-8c05-955196ae9c07 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.974687] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 541.976234] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce1cea7-8c93-4883-af2b-1313dcf52bd0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.984276] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fa7fbd-0dee-44b1-bc25-e1ff95419ddd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.000889] env[62204]: DEBUG nova.compute.provider_tree [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 542.154573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Acquiring lock "25e4a40e-c5a6-47f5-9e53-00d3073252fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.155028] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Lock "25e4a40e-c5a6-47f5-9e53-00d3073252fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.233315] env[62204]: DEBUG nova.network.neutron [req-67f900f1-4d2b-48bb-818e-fb246383491f req-8df78a5a-eb16-406e-a0a1-2e01490d4e00 service nova] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 542.281724] env[62204]: DEBUG nova.network.neutron [-] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.283540] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199380, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026781} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.285365] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 542.286304] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e82f80ba-eccf-419f-986a-38f5cfcfeec8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.294535] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 542.294535] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5267ced4-51ed-58ee-0de5-59bfcb687207" [ 542.294535] env[62204]: _type = "Task" [ 542.294535] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.303167] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5267ced4-51ed-58ee-0de5-59bfcb687207, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.437904] env[62204]: DEBUG nova.network.neutron [req-67f900f1-4d2b-48bb-818e-fb246383491f req-8df78a5a-eb16-406e-a0a1-2e01490d4e00 service nova] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.480215] env[62204]: DEBUG nova.network.neutron [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 542.503457] env[62204]: DEBUG nova.scheduler.client.report [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 542.788546] env[62204]: INFO nova.compute.manager [-] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Took 1.06 seconds to deallocate network for instance. 
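The DeleteDatastoreFile_Task and SearchDatastore_Task entries above, together with the repeated "_poll_task ... progress is 0%" lines, follow the usual oslo.vmware pattern: a vCenter task method is invoked through the API session and the caller blocks on wait_for_task() while the library polls the task. Below is a minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials, file path and datacenter argument are placeholders, not values from this log.

from oslo_vmware import api as vmware_api

# Placeholder connection details; the real values are whatever nova-compute
# is configured with (see the session setup earlier in this log).
session = vmware_api.VMwareAPISession(
    host='vc.example.test',
    server_username='user',
    server_password='secret',
    api_retry_count=10,
    task_poll_interval=0.5)

# Invoke a vCenter task method; the return value is a task reference.
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          file_manager,
                          name='[datastore2] some/file.vmdk',
                          datacenter=None)  # placeholder datacenter moref

# wait_for_task() polls the task until it completes (the "_poll_task ...
# progress is N%" DEBUG entries) and raises if the task ends in an error state.
session.wait_for_task(task)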
[ 542.789677] env[62204]: DEBUG nova.compute.claims [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 542.789677] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.805633] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5267ced4-51ed-58ee-0de5-59bfcb687207, 'name': SearchDatastore_Task, 'duration_secs': 0.009014} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.805902] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.806163] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 542.806406] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbacc50b-68bc-4b0d-854b-0995f7753a23 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.813097] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 542.813097] env[62204]: value = "task-1199381" [ 542.813097] env[62204]: _type = "Task" [ 542.813097] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.825501] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199381, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.941809] env[62204]: DEBUG oslo_concurrency.lockutils [req-67f900f1-4d2b-48bb-818e-fb246383491f req-8df78a5a-eb16-406e-a0a1-2e01490d4e00 service nova] Releasing lock "refresh_cache-d7f15c60-04ac-429e-b16f-8774f9a050b8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 542.987022] env[62204]: INFO nova.compute.manager [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] Took 1.07 seconds to deallocate network for instance. [ 543.011426] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.508s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.011978] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 543.014840] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.286s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.326587] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457363} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.326955] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 543.327258] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 543.327595] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a94d426b-3f89-4b12-adcf-d2edfe142741 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.334716] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 543.334716] env[62204]: value = "task-1199382" [ 543.334716] env[62204]: _type = "Task" [ 543.334716] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 543.346776] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199382, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.521914] env[62204]: DEBUG nova.compute.utils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 543.530525] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 543.530525] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 543.774663] env[62204]: DEBUG nova.policy [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8aa862b4e2ef458ca3d8d52ef1fb094f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f785265ade7a444bbc1112585f808bb4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 543.848508] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199382, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.867753] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ccfb28e-8300-49dc-b8c2-3b780b9fb57b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.876335] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcd481f-7c06-4aa7-a746-c14b5bdcaf5a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.908151] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8e9cfe-cc44-4bef-ad06-ea528c0067cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.915170] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a0114b-ef0e-4282-9f11-5bc70aac1309 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.928996] env[62204]: DEBUG nova.compute.provider_tree [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.949843] env[62204]: DEBUG nova.compute.manager [req-cfddd18a-b8d9-4bfa-8bc5-cb6ccfca074b req-52583594-063d-4337-a2f9-4bfdad913486 service nova] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Received event network-vif-deleted-e854ad35-bc66-487f-aa75-3e582f8b74c2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 544.030474] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 
tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 544.043877] env[62204]: INFO nova.scheduler.client.report [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Deleted allocations for instance 04f9b938-7bcd-4b49-9115-0693fb478326 [ 544.347386] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199382, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.696365} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 544.347652] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 544.348721] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef774fab-a9e6-41e0-b5f8-e1d53488bbd4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.371422] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 544.371752] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35616574-dda4-48a5-9913-a36c04a045c7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.395229] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 544.395229] env[62204]: value = "task-1199383" [ 544.395229] env[62204]: _type = "Task" [ 544.395229] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.410809] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199383, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.434304] env[62204]: DEBUG nova.scheduler.client.report [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.559068] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad6ed403-3a77-479a-9d4d-5d8485cafc10 tempest-ServersAdminNegativeTestJSON-820979216 tempest-ServersAdminNegativeTestJSON-820979216-project-member] Lock "04f9b938-7bcd-4b49-9115-0693fb478326" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.668s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.560143] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "04f9b938-7bcd-4b49-9115-0693fb478326" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 23.669s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.560143] env[62204]: INFO nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 04f9b938-7bcd-4b49-9115-0693fb478326] During sync_power_state the instance has a pending task (spawning). Skip. 
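The lock bookkeeping lines here ("Acquiring lock ... by ...", "acquired ... :: waited", "released ... :: held") are emitted by oslo.concurrency's lockutils helpers, which Nova uses for per-instance and per-resource critical sections such as the "04f9b938-..." and "compute_resources" locks above. A minimal sketch of the two usual forms follows; the lock names are taken from the log and the bodies are placeholders.

from oslo_concurrency import lockutils

instance_uuid = '04f9b938-7bcd-4b49-9115-0693fb478326'  # lock name from the log

# Context-manager form: the name passed here is the quoted lock name that
# appears in the "Acquiring lock"/"released" DEBUG messages.
with lockutils.lock(instance_uuid):
    pass  # per-instance work guarded by the lock

# Decorator form, equivalent for a whole function.
@lockutils.synchronized('compute_resources')
def claim_resources():
    pass  # resource-tracker style critical section

claim_resources()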
[ 544.560664] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "04f9b938-7bcd-4b49-9115-0693fb478326" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.751796] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "f5a11b74-e1fe-44c0-bccb-5817bc582608" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.752706] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "f5a11b74-e1fe-44c0-bccb-5817bc582608" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.908306] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199383, 'name': ReconfigVM_Task, 'duration_secs': 0.415315} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 544.908541] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 544.912017] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0550b41-b05d-42cf-b9b6-49975df444df {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.916590] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 544.916590] env[62204]: value = "task-1199384" [ 544.916590] env[62204]: _type = "Task" [ 544.916590] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.924245] env[62204]: DEBUG nova.compute.manager [req-001a52c1-4254-4c6f-ade8-82af83978ae4 req-bd274495-9faa-47a5-b928-eb1703b89733 service nova] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Received event network-changed-aee748ca-32b6-4422-8048-690ec8cdbced {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 544.924434] env[62204]: DEBUG nova.compute.manager [req-001a52c1-4254-4c6f-ade8-82af83978ae4 req-bd274495-9faa-47a5-b928-eb1703b89733 service nova] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Refreshing instance network info cache due to event network-changed-aee748ca-32b6-4422-8048-690ec8cdbced. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 544.924636] env[62204]: DEBUG oslo_concurrency.lockutils [req-001a52c1-4254-4c6f-ade8-82af83978ae4 req-bd274495-9faa-47a5-b928-eb1703b89733 service nova] Acquiring lock "refresh_cache-6e999bd0-38be-42d5-b2b7-3f9196fb941e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.924766] env[62204]: DEBUG oslo_concurrency.lockutils [req-001a52c1-4254-4c6f-ade8-82af83978ae4 req-bd274495-9faa-47a5-b928-eb1703b89733 service nova] Acquired lock "refresh_cache-6e999bd0-38be-42d5-b2b7-3f9196fb941e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.924939] env[62204]: DEBUG nova.network.neutron [req-001a52c1-4254-4c6f-ade8-82af83978ae4 req-bd274495-9faa-47a5-b928-eb1703b89733 service nova] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Refreshing network info cache for port aee748ca-32b6-4422-8048-690ec8cdbced {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 544.938904] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199384, 'name': Rename_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.940415] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.926s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.941203] env[62204]: ERROR nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf, please check neutron logs for more information. 
[ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Traceback (most recent call last): [ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self.driver.spawn(context, instance, image_meta, [ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] vm_ref = self.build_virtual_machine(instance, [ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] vif_infos = vmwarevif.get_vif_info(self._session, [ 544.941203] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] for vif in network_info: [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] return self._sync_wrapper(fn, *args, **kwargs) [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self.wait() [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self[:] = self._gt.wait() [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] return self._exit_event.wait() [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] result = hub.switch() [ 544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
544.943319] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] return self.greenlet.switch() [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] result = function(*args, **kwargs) [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] return func(*args, **kwargs) [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] raise e [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] nwinfo = self.network_api.allocate_for_instance( [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] created_port_ids = self._update_ports_for_instance( [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] with excutils.save_and_reraise_exception(): [ 544.944035] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] self.force_reraise() [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] raise self.value [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] updated_port = self._update_port( [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] _ensure_no_port_binding_failure(port) [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] raise exception.PortBindingFailed(port_id=port['id']) [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] nova.exception.PortBindingFailed: Binding failed for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf, please check neutron logs for more information. [ 544.944408] env[62204]: ERROR nova.compute.manager [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] [ 544.944792] env[62204]: DEBUG nova.compute.utils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Binding failed for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 544.945832] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.732s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.947227] env[62204]: INFO nova.compute.claims [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 544.950609] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Build of instance e0a58fc7-7929-4e18-8cc9-1e5074123f06 was re-scheduled: Binding failed for port 339cf60f-af71-4eaa-8b7e-b1434526bbcf, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 544.953516] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 544.953516] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "refresh_cache-e0a58fc7-7929-4e18-8cc9-1e5074123f06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.953516] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquired lock "refresh_cache-e0a58fc7-7929-4e18-8cc9-1e5074123f06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.953516] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 545.013848] env[62204]: ERROR nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port aee748ca-32b6-4422-8048-690ec8cdbced, please check neutron logs for more information. 
[ 545.013848] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 545.013848] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.013848] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 545.013848] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.013848] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 545.013848] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.013848] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 545.013848] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.013848] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 545.013848] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.013848] env[62204]: ERROR nova.compute.manager raise self.value [ 545.013848] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.013848] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 545.013848] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.013848] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 545.014391] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.014391] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 545.014391] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port aee748ca-32b6-4422-8048-690ec8cdbced, please check neutron logs for more information. 
[ 545.014391] env[62204]: ERROR nova.compute.manager [ 545.014391] env[62204]: Traceback (most recent call last): [ 545.014391] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 545.014391] env[62204]: listener.cb(fileno) [ 545.014391] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.014391] env[62204]: result = function(*args, **kwargs) [ 545.014391] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 545.014391] env[62204]: return func(*args, **kwargs) [ 545.014391] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.014391] env[62204]: raise e [ 545.014391] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.014391] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 545.014391] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.014391] env[62204]: created_port_ids = self._update_ports_for_instance( [ 545.014391] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.014391] env[62204]: with excutils.save_and_reraise_exception(): [ 545.014391] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.014391] env[62204]: self.force_reraise() [ 545.014391] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.014391] env[62204]: raise self.value [ 545.014391] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.014391] env[62204]: updated_port = self._update_port( [ 545.014391] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.014391] env[62204]: _ensure_no_port_binding_failure(port) [ 545.014391] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.014391] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 545.015208] env[62204]: nova.exception.PortBindingFailed: Binding failed for port aee748ca-32b6-4422-8048-690ec8cdbced, please check neutron logs for more information. [ 545.015208] env[62204]: Removing descriptor: 16 [ 545.015208] env[62204]: ERROR nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port aee748ca-32b6-4422-8048-690ec8cdbced, please check neutron logs for more information. 
[ 545.015208] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Traceback (most recent call last): [ 545.015208] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 545.015208] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] yield resources [ 545.015208] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 545.015208] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self.driver.spawn(context, instance, image_meta, [ 545.015208] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 545.015208] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 545.015208] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 545.015208] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] vm_ref = self.build_virtual_machine(instance, [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] vif_infos = vmwarevif.get_vif_info(self._session, [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] for vif in network_info: [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] return self._sync_wrapper(fn, *args, **kwargs) [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self.wait() [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self[:] = self._gt.wait() [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] return self._exit_event.wait() [ 545.015556] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 545.015924] env[62204]: ERROR 
nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] result = hub.switch() [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] return self.greenlet.switch() [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] result = function(*args, **kwargs) [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] return func(*args, **kwargs) [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] raise e [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] nwinfo = self.network_api.allocate_for_instance( [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.015924] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] created_port_ids = self._update_ports_for_instance( [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] with excutils.save_and_reraise_exception(): [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self.force_reraise() [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] raise self.value [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] updated_port = self._update_port( [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.016310] 
env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] _ensure_no_port_binding_failure(port) [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.016310] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] raise exception.PortBindingFailed(port_id=port['id']) [ 545.016650] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] nova.exception.PortBindingFailed: Binding failed for port aee748ca-32b6-4422-8048-690ec8cdbced, please check neutron logs for more information. [ 545.016650] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] [ 545.016650] env[62204]: INFO nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Terminating instance [ 545.018689] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Acquiring lock "refresh_cache-6e999bd0-38be-42d5-b2b7-3f9196fb941e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.049583] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 545.062873] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 545.085339] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 545.085647] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 545.086738] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 545.086738] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 545.086738] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 545.086738] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 545.087044] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 545.087044] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 545.087215] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 545.087322] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 545.088248] env[62204]: DEBUG nova.virt.hardware [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 545.089565] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c8d26a-f30f-4784-b4ac-856ffd642953 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.098632] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058c1d45-8d3b-4fd0-b38f-9c793e05ac8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.249072] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Successfully created port: ac76cc56-d0e7-4296-8645-969dbb6f08ed {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.432652] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199384, 'name': Rename_Task, 'duration_secs': 0.133817} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.432964] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 545.434110] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8ef77c7-4065-4435-9da1-ba024a5da1ec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.442086] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 545.442086] env[62204]: value = "task-1199385" [ 545.442086] env[62204]: _type = "Task" [ 545.442086] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.450485] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199385, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.476358] env[62204]: DEBUG nova.network.neutron [req-001a52c1-4254-4c6f-ade8-82af83978ae4 req-bd274495-9faa-47a5-b928-eb1703b89733 service nova] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.514012] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.599782] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.747435] env[62204]: DEBUG nova.network.neutron [req-001a52c1-4254-4c6f-ade8-82af83978ae4 req-bd274495-9faa-47a5-b928-eb1703b89733 service nova] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.958596] env[62204]: DEBUG oslo_vmware.api [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199385, 'name': PowerOnVM_Task, 'duration_secs': 0.415836} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.962562] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 545.962926] env[62204]: DEBUG nova.compute.manager [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 545.964610] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90779b75-31fc-409e-8868-6b547d78c057 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.000903] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.257528] env[62204]: DEBUG oslo_concurrency.lockutils [req-001a52c1-4254-4c6f-ade8-82af83978ae4 req-bd274495-9faa-47a5-b928-eb1703b89733 service nova] Releasing lock "refresh_cache-6e999bd0-38be-42d5-b2b7-3f9196fb941e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.258783] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Acquired lock "refresh_cache-6e999bd0-38be-42d5-b2b7-3f9196fb941e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 546.258783] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 546.407543] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37961315-60fc-4170-89ab-ebb59297e12e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.415210] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412dd8df-0b47-4675-8aad-2d09f891dcd7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.447680] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fe73fa-031b-4828-b1a4-0e2d35dbb12b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.456021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd3c9cd-607f-4ed4-b005-a85b30c8eecb {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.471344] env[62204]: DEBUG nova.compute.provider_tree [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.484856] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 546.506821] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Releasing lock "refresh_cache-e0a58fc7-7929-4e18-8cc9-1e5074123f06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.507087] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 546.507319] env[62204]: DEBUG nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 546.507592] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 546.575575] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.917126] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.975035] env[62204]: DEBUG nova.scheduler.client.report [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 547.082148] env[62204]: DEBUG nova.network.neutron [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.352964] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.484825] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.485486] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 547.489968] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.568s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.491832] env[62204]: INFO nova.compute.claims [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 547.583941] env[62204]: INFO nova.compute.manager [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: e0a58fc7-7929-4e18-8cc9-1e5074123f06] Took 1.08 seconds to deallocate network for instance. [ 547.855352] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Releasing lock "refresh_cache-6e999bd0-38be-42d5-b2b7-3f9196fb941e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.855769] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 547.855955] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 547.856273] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91f513ff-156b-436c-9284-3e22338ab93e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.869022] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f93f59-563f-4258-b67c-283183cbb8c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.894974] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6e999bd0-38be-42d5-b2b7-3f9196fb941e could not be found. 
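The Rename_Task and PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver calls a *_Task method on the VM managed object, receives a task reference (the "task-1199385" value), and wait_for_task polls it (the "progress is 0%" lines) until vCenter reports success. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession and an already-resolved vm_ref (both hypothetical here, not constructed from this log):

def power_on_vm(session, vm_ref):
    """Invoke PowerOnVM_Task and block until vCenter reports success."""
    # invoke_api sends the SOAP call and returns a Task managed-object
    # reference (the 'task-1199385'-style value seen in the log).
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task's TaskInfo on an interval and returns
    # it once the task reaches the 'success' state; a failed task is
    # surfaced as an oslo.vmware exception instead of a return value.
    return session.wait_for_task(task_ref)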
[ 547.895212] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 547.895251] env[62204]: INFO nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 547.895562] env[62204]: DEBUG oslo.service.loopingcall [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 547.896156] env[62204]: DEBUG nova.compute.manager [-] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 547.896156] env[62204]: DEBUG nova.network.neutron [-] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 547.945098] env[62204]: DEBUG nova.network.neutron [-] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.993368] env[62204]: DEBUG nova.compute.utils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.993368] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 547.993368] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 548.182749] env[62204]: DEBUG nova.policy [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52beed299e6a444c95161c4c35204284', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9001bede722d4f6d9cae0ffc863467d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 548.400067] env[62204]: ERROR nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ac76cc56-d0e7-4296-8645-969dbb6f08ed, please check neutron logs for more information. [ 548.400067] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 548.400067] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.400067] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 548.400067] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.400067] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 548.400067] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.400067] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 548.400067] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.400067] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 548.400067] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.400067] env[62204]: ERROR nova.compute.manager raise self.value [ 548.400067] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.400067] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 548.400067] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.400067] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 548.400616] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.400616] env[62204]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 548.400616] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ac76cc56-d0e7-4296-8645-969dbb6f08ed, please check neutron logs for more information. [ 548.400616] env[62204]: ERROR nova.compute.manager [ 548.400616] env[62204]: Traceback (most recent call last): [ 548.400616] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 548.400616] env[62204]: listener.cb(fileno) [ 548.400616] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.400616] env[62204]: result = function(*args, **kwargs) [ 548.400616] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.400616] env[62204]: return func(*args, **kwargs) [ 548.400616] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.400616] env[62204]: raise e [ 548.400616] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.400616] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 548.400616] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.400616] env[62204]: created_port_ids = self._update_ports_for_instance( [ 548.400616] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.400616] env[62204]: with excutils.save_and_reraise_exception(): [ 548.400616] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.400616] env[62204]: self.force_reraise() [ 548.400616] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.400616] env[62204]: raise self.value [ 548.400616] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.400616] env[62204]: updated_port = self._update_port( [ 548.400616] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.400616] env[62204]: _ensure_no_port_binding_failure(port) [ 548.400616] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.400616] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 548.401504] env[62204]: nova.exception.PortBindingFailed: Binding failed for port ac76cc56-d0e7-4296-8645-969dbb6f08ed, please check neutron logs for more information. [ 548.401504] env[62204]: Removing descriptor: 17 [ 548.401504] env[62204]: ERROR nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ac76cc56-d0e7-4296-8645-969dbb6f08ed, please check neutron logs for more information. 
[ 548.401504] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Traceback (most recent call last): [ 548.401504] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 548.401504] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] yield resources [ 548.401504] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 548.401504] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self.driver.spawn(context, instance, image_meta, [ 548.401504] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 548.401504] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.401504] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.401504] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] vm_ref = self.build_virtual_machine(instance, [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] for vif in network_info: [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] return self._sync_wrapper(fn, *args, **kwargs) [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self.wait() [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self[:] = self._gt.wait() [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] return self._exit_event.wait() [ 548.401874] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.402330] env[62204]: ERROR 
nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] result = hub.switch() [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] return self.greenlet.switch() [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] result = function(*args, **kwargs) [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] return func(*args, **kwargs) [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] raise e [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] nwinfo = self.network_api.allocate_for_instance( [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.402330] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] created_port_ids = self._update_ports_for_instance( [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] with excutils.save_and_reraise_exception(): [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self.force_reraise() [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] raise self.value [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] updated_port = self._update_port( [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.402693] 
env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] _ensure_no_port_binding_failure(port) [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.402693] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] raise exception.PortBindingFailed(port_id=port['id']) [ 548.403191] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] nova.exception.PortBindingFailed: Binding failed for port ac76cc56-d0e7-4296-8645-969dbb6f08ed, please check neutron logs for more information. [ 548.403191] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] [ 548.403191] env[62204]: INFO nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Terminating instance [ 548.407566] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Acquiring lock "refresh_cache-cb218f34-dec8-46ae-8659-6c37df5d51b4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.407718] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Acquired lock "refresh_cache-cb218f34-dec8-46ae-8659-6c37df5d51b4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.407881] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 548.448079] env[62204]: DEBUG nova.network.neutron [-] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.498443] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 548.647660] env[62204]: INFO nova.scheduler.client.report [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Deleted allocations for instance e0a58fc7-7929-4e18-8cc9-1e5074123f06 [ 548.953860] env[62204]: INFO nova.compute.manager [-] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Took 1.06 seconds to deallocate network for instance. 
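The traceback above bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure, which raises PortBindingFailed for the freshly created port ac76cc56-d0e7-4296-8645-969dbb6f08ed. A standalone sketch of that kind of check, assuming (as Nova's helper does) that Neutron signals a failed binding through the port's binding:vif_type field; the local PortBindingFailed class is only a stand-in for nova.exception.PortBindingFailed:

# Stand-in for nova.exception.PortBindingFailed, kept local so the
# sketch runs on its own.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports the port's binding as failed."""
    # Assumption: a failed binding shows up as
    # binding:vif_type == 'binding_failed' in the port dict.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example mirroring the log: the port exists but its binding failed.
port = {'id': 'ac76cc56-d0e7-4296-8645-969dbb6f08ed',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port ac76cc56-..., please check neutron logs ...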
[ 548.957741] env[62204]: DEBUG nova.compute.claims [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 548.958262] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.035894] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4ba258-b6fe-486e-a2e4-da768436d8f2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.044246] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef202fd2-5f9e-4b70-83b6-34593f997e6b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.079713] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36466d71-c2e0-4b91-8f1c-c468c0fbfd86 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.088293] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe70bec-dd9b-465d-b170-964e9f02bc78 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.103969] env[62204]: DEBUG nova.compute.provider_tree [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.105625] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.156581] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1ee0f59-a88a-4d4d-b00f-d913d57f329a tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "e0a58fc7-7929-4e18-8cc9-1e5074123f06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.052s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.337378] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.372270] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "5e264b99-8025-471a-bc6b-238f1bca054c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.372270] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "5e264b99-8025-471a-bc6b-238f1bca054c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.414448] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Successfully created port: 85984b65-22df-43df-9e1b-1e7316bb64d1 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 549.419418] env[62204]: DEBUG nova.compute.manager [req-6e8c8794-02b7-42d2-b50c-966c03a18f21 req-cb6ab31c-067b-496f-b4ff-557ad5e9fc88 service nova] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Received event network-vif-deleted-aee748ca-32b6-4422-8048-690ec8cdbced {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 549.511656] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 549.545396] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 549.545623] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 549.545770] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.545959] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 549.546380] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.546380] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 549.546507] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 549.546591] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 
tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 549.546752] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 549.546907] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 549.547098] env[62204]: DEBUG nova.virt.hardware [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 549.547972] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23acb13a-ac89-4e01-8107-35cc41c5bcf6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.556869] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec71e2e-bda4-4b2c-abc2-0ff9af99db31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.609398] env[62204]: DEBUG nova.scheduler.client.report [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.638714] env[62204]: INFO nova.compute.manager [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Rebuilding instance [ 549.659668] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 549.701834] env[62204]: DEBUG nova.compute.manager [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 549.702851] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa45ce3f-0c1e-4227-a305-31cd84dc441b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.840380] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Releasing lock "refresh_cache-cb218f34-dec8-46ae-8659-6c37df5d51b4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.841684] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 549.841813] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 549.842090] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4863cc3d-9209-4694-b09a-dca10e1e8572 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.860182] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2984e0f-7871-4dad-9b60-98b915b986d6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.888278] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cb218f34-dec8-46ae-8659-6c37df5d51b4 could not be found. [ 549.888573] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 549.888729] env[62204]: INFO nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Took 0.05 seconds to destroy the instance on the hypervisor. 
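The inventory payload reported for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 a few entries above pairs each resource class with total, reserved and allocation_ratio. A small sketch of how such a record maps to schedulable capacity, assuming the usual Placement formula capacity = (total - reserved) * allocation_ratio (an assumption about Placement internals, not something this log states):

# Capacity implied by the inventory record logged above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}


def capacity(inv):
    # Assumed formula: usable units = (total - reserved) * allocation_ratio.
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])


for rc, inv in inventory.items():
    print(rc, capacity(inv))
# VCPU 192
# MEMORY_MB 196078
# DISK_GB 400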
[ 549.888928] env[62204]: DEBUG oslo.service.loopingcall [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.889266] env[62204]: DEBUG nova.compute.manager [-] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 549.889369] env[62204]: DEBUG nova.network.neutron [-] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 549.920093] env[62204]: DEBUG nova.network.neutron [-] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.114760] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.115115] env[62204]: DEBUG nova.compute.manager [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 550.121263] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.768s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.121263] env[62204]: INFO nova.compute.claims [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.199299] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.216343] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 550.217042] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bd3b59f-14b3-4796-9eff-ff27c90784e3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.226778] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 550.226778] env[62204]: value = "task-1199386" [ 550.226778] env[62204]: _type = "Task" [ 550.226778] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.236100] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199386, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.425027] env[62204]: DEBUG nova.network.neutron [-] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.626445] env[62204]: DEBUG nova.compute.utils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 550.630492] env[62204]: DEBUG nova.compute.manager [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Not allocating networking since 'none' was specified. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 550.740080] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199386, 'name': PowerOffVM_Task, 'duration_secs': 0.115651} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.740421] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 550.740645] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 550.743295] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d624c59-4136-4841-9176-15f109f8b824 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.756053] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 550.756169] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c95482a-e7da-4f6f-bccb-6fea4101a4cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.778744] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 550.778901] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 
tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 550.778994] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Deleting the datastore file [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 550.779803] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-255319ca-3dc1-4d99-a221-838a78108986 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.785710] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 550.785710] env[62204]: value = "task-1199388" [ 550.785710] env[62204]: _type = "Task" [ 550.785710] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.793982] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199388, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.928357] env[62204]: INFO nova.compute.manager [-] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Took 1.04 seconds to deallocate network for instance. [ 550.931261] env[62204]: DEBUG nova.compute.claims [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 550.931494] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.137056] env[62204]: DEBUG nova.compute.manager [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 551.297186] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199388, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.092326} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.299286] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 551.299286] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 551.299286] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 551.542552] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945ea4fb-6f3b-4da2-9ee6-79a5e035ac83 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.551505] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af588808-ec20-4f1a-b96b-15a2bdb72b53 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.588324] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031d0bd2-a4d9-48a7-9a8a-67ac0b12348e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.595877] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0583dc-07c9-4e8c-b50b-f18e2b4dfa20 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.615479] env[62204]: DEBUG nova.compute.provider_tree [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.710037] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Acquiring lock "9e573093-6434-452d-8025-4688d9f78c53" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.710037] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Lock "9e573093-6434-452d-8025-4688d9f78c53" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.756392] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "63ed8992-0e8f-41ca-8b28-c0b2538ff61c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.756392] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "63ed8992-0e8f-41ca-8b28-c0b2538ff61c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.118503] env[62204]: DEBUG nova.scheduler.client.report [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 552.148127] env[62204]: DEBUG nova.compute.manager [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 552.183805] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 552.184079] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 552.184245] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 552.184428] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 552.184569] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 552.184710] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 552.184918] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 552.185589] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
552.185892] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 552.185996] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 552.186191] env[62204]: DEBUG nova.virt.hardware [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 552.190290] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac18e71-312e-40a7-9d87-ea5bec2ceee1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.199948] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb27025-76da-4afe-ba32-40ffe063c41e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.216372] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 552.222906] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Creating folder: Project (415ebb8ed7e243ff915fbd51a99c603b). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 552.223303] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b2f4da2-3550-4604-a5f5-24c530582c85 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.237194] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Created folder: Project (415ebb8ed7e243ff915fbd51a99c603b) in parent group-v259933. [ 552.237194] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Creating folder: Instances. Parent ref: group-v259942. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 552.237194] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d81ada45-5526-4f48-b49a-d9e75b172c36 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.244420] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Created folder: Instances in parent group-v259942. [ 552.244420] env[62204]: DEBUG oslo.service.loopingcall [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 552.244420] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 552.244420] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e260b66-c714-49ff-b036-7ad817c2cf81 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.261633] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 552.261633] env[62204]: value = "task-1199391" [ 552.261633] env[62204]: _type = "Task" [ 552.261633] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.272446] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199391, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.333767] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "7c21539c-35fa-4f58-beb0-e965ffaf79af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.334000] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "7c21539c-35fa-4f58-beb0-e965ffaf79af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.345334] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 552.345334] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 552.345334] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 552.345623] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 552.345623] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 552.346608] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 552.346608] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 552.347282] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 552.347282] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 552.347282] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 552.347813] env[62204]: DEBUG nova.virt.hardware [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 552.350632] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b04e21-04ed-4e0e-bfb3-4e91132a2d3a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.353044] env[62204]: ERROR nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 85984b65-22df-43df-9e1b-1e7316bb64d1, please check neutron logs for more information. 
[ 552.353044] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 552.353044] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.353044] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 552.353044] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.353044] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 552.353044] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.353044] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 552.353044] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.353044] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 552.353044] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.353044] env[62204]: ERROR nova.compute.manager raise self.value [ 552.353044] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.353044] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 552.353044] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.353044] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 552.353596] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.353596] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 552.353596] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 85984b65-22df-43df-9e1b-1e7316bb64d1, please check neutron logs for more information. 
[ 552.353596] env[62204]: ERROR nova.compute.manager [ 552.353596] env[62204]: Traceback (most recent call last): [ 552.353596] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 552.353596] env[62204]: listener.cb(fileno) [ 552.353596] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.353596] env[62204]: result = function(*args, **kwargs) [ 552.353596] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 552.353596] env[62204]: return func(*args, **kwargs) [ 552.353596] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.353596] env[62204]: raise e [ 552.353596] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.353596] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 552.353596] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.353596] env[62204]: created_port_ids = self._update_ports_for_instance( [ 552.353596] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.353596] env[62204]: with excutils.save_and_reraise_exception(): [ 552.353596] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.353596] env[62204]: self.force_reraise() [ 552.353596] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.353596] env[62204]: raise self.value [ 552.353596] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.353596] env[62204]: updated_port = self._update_port( [ 552.353596] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.353596] env[62204]: _ensure_no_port_binding_failure(port) [ 552.353596] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.353596] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 552.354506] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 85984b65-22df-43df-9e1b-1e7316bb64d1, please check neutron logs for more information. [ 552.354506] env[62204]: Removing descriptor: 14 [ 552.354506] env[62204]: ERROR nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 85984b65-22df-43df-9e1b-1e7316bb64d1, please check neutron logs for more information. 
[ 552.354506] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Traceback (most recent call last): [ 552.354506] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 552.354506] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] yield resources [ 552.354506] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 552.354506] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self.driver.spawn(context, instance, image_meta, [ 552.354506] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 552.354506] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.354506] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.354506] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] vm_ref = self.build_virtual_machine(instance, [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] for vif in network_info: [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] return self._sync_wrapper(fn, *args, **kwargs) [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self.wait() [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self[:] = self._gt.wait() [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] return self._exit_event.wait() [ 552.354917] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.355923] env[62204]: ERROR 
nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] result = hub.switch() [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] return self.greenlet.switch() [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] result = function(*args, **kwargs) [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] return func(*args, **kwargs) [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] raise e [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] nwinfo = self.network_api.allocate_for_instance( [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.355923] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] created_port_ids = self._update_ports_for_instance( [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] with excutils.save_and_reraise_exception(): [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self.force_reraise() [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] raise self.value [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] updated_port = self._update_port( [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.356518] 
env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] _ensure_no_port_binding_failure(port) [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.356518] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] raise exception.PortBindingFailed(port_id=port['id']) [ 552.356877] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] nova.exception.PortBindingFailed: Binding failed for port 85984b65-22df-43df-9e1b-1e7316bb64d1, please check neutron logs for more information. [ 552.356877] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] [ 552.356877] env[62204]: INFO nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Terminating instance [ 552.359941] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Acquiring lock "refresh_cache-c1fc621a-bc36-4d55-beec-cdc446bc8d06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.359941] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Acquired lock "refresh_cache-c1fc621a-bc36-4d55-beec-cdc446bc8d06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.359941] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.365688] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d338847-f961-4c07-8af2-a88f1df2faed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.382666] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 552.388398] env[62204]: DEBUG oslo.service.loopingcall [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 552.388894] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 552.389137] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d65d757-4526-4095-b774-29c275d4bf38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.407041] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 552.407041] env[62204]: value = "task-1199392" [ 552.407041] env[62204]: _type = "Task" [ 552.407041] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.417545] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199392, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.627583] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 552.628645] env[62204]: DEBUG nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 552.631074] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.103s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.774434] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199391, 'name': CreateVM_Task, 'duration_secs': 0.288126} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.774843] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 552.775291] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.775516] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.776108] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 552.776108] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8759d388-f5bc-4046-876a-3abcb3179c93 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.782071] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 552.782071] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52621906-9fc2-de2d-b2f3-6fc5cb42a161" [ 552.782071] env[62204]: _type = "Task" [ 552.782071] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 552.791825] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52621906-9fc2-de2d-b2f3-6fc5cb42a161, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.867772] env[62204]: DEBUG nova.compute.manager [req-546d2e0d-7861-4058-8bf4-b5bf21042d2b req-9f7b4736-d571-4a22-9e6a-921406a9b758 service nova] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Received event network-changed-ac76cc56-d0e7-4296-8645-969dbb6f08ed {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 552.867959] env[62204]: DEBUG nova.compute.manager [req-546d2e0d-7861-4058-8bf4-b5bf21042d2b req-9f7b4736-d571-4a22-9e6a-921406a9b758 service nova] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Refreshing instance network info cache due to event network-changed-ac76cc56-d0e7-4296-8645-969dbb6f08ed. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 552.868270] env[62204]: DEBUG oslo_concurrency.lockutils [req-546d2e0d-7861-4058-8bf4-b5bf21042d2b req-9f7b4736-d571-4a22-9e6a-921406a9b758 service nova] Acquiring lock "refresh_cache-cb218f34-dec8-46ae-8659-6c37df5d51b4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.868732] env[62204]: DEBUG oslo_concurrency.lockutils [req-546d2e0d-7861-4058-8bf4-b5bf21042d2b req-9f7b4736-d571-4a22-9e6a-921406a9b758 service nova] Acquired lock "refresh_cache-cb218f34-dec8-46ae-8659-6c37df5d51b4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.869217] env[62204]: DEBUG nova.network.neutron [req-546d2e0d-7861-4058-8bf4-b5bf21042d2b req-9f7b4736-d571-4a22-9e6a-921406a9b758 service nova] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Refreshing network info cache for port ac76cc56-d0e7-4296-8645-969dbb6f08ed {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 552.876230] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Acquiring lock "39d5f95c-7c98-4263-a46e-948d2e3d31ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.876472] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Lock "39d5f95c-7c98-4263-a46e-948d2e3d31ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.897369] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 552.917590] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199392, 'name': CreateVM_Task, 'duration_secs': 0.276484} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 552.917855] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 552.918155] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.929638] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Acquiring lock "7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.929853] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Lock "7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.014918] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.031710] env[62204]: DEBUG nova.compute.manager [req-eff305d6-114b-4f49-905a-22f92ae36aef req-2c7bbfac-50c7-4da5-b01e-50078fb00cf7 service nova] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Received event network-changed-85984b65-22df-43df-9e1b-1e7316bb64d1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 553.031922] env[62204]: DEBUG nova.compute.manager [req-eff305d6-114b-4f49-905a-22f92ae36aef req-2c7bbfac-50c7-4da5-b01e-50078fb00cf7 service nova] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Refreshing instance network info cache due to event network-changed-85984b65-22df-43df-9e1b-1e7316bb64d1. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 553.032135] env[62204]: DEBUG oslo_concurrency.lockutils [req-eff305d6-114b-4f49-905a-22f92ae36aef req-2c7bbfac-50c7-4da5-b01e-50078fb00cf7 service nova] Acquiring lock "refresh_cache-c1fc621a-bc36-4d55-beec-cdc446bc8d06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.134710] env[62204]: DEBUG nova.compute.utils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 553.140124] env[62204]: DEBUG nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 553.140124] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 553.220745] env[62204]: DEBUG nova.policy [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6010257f8b4e4449b249eaf11c7bbc42', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6322ffe0d36b4e388e509fd7d2c6ccf5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 553.292849] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52621906-9fc2-de2d-b2f3-6fc5cb42a161, 'name': SearchDatastore_Task, 'duration_secs': 0.008589} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.293407] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.293640] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 553.293863] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.294040] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.294235] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 553.294501] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.294787] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 553.294998] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb0cfaa5-7ef0-4310-ba15-f3df45a7619e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.296712] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb4bc2c1-e283-4c80-be76-4c8b5082b50c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.301435] env[62204]: DEBUG oslo_vmware.api 
[None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 553.301435] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ac2e09-c702-10b4-0dc9-67b8c1f6abeb" [ 553.301435] env[62204]: _type = "Task" [ 553.301435] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.305766] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 553.305954] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 553.309206] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5823241-8993-4024-b216-794019df737c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.311257] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ac2e09-c702-10b4-0dc9-67b8c1f6abeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.314073] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 553.314073] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5284ee4b-87e4-6574-f7cb-6b8658c97572" [ 553.314073] env[62204]: _type = "Task" [ 553.314073] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.321412] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5284ee4b-87e4-6574-f7cb-6b8658c97572, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 553.406296] env[62204]: DEBUG nova.network.neutron [req-546d2e0d-7861-4058-8bf4-b5bf21042d2b req-9f7b4736-d571-4a22-9e6a-921406a9b758 service nova] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.519645] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Releasing lock "refresh_cache-c1fc621a-bc36-4d55-beec-cdc446bc8d06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.520140] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 553.520326] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 553.520629] env[62204]: DEBUG oslo_concurrency.lockutils [req-eff305d6-114b-4f49-905a-22f92ae36aef req-2c7bbfac-50c7-4da5-b01e-50078fb00cf7 service nova] Acquired lock "refresh_cache-c1fc621a-bc36-4d55-beec-cdc446bc8d06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.520801] env[62204]: DEBUG nova.network.neutron [req-eff305d6-114b-4f49-905a-22f92ae36aef req-2c7bbfac-50c7-4da5-b01e-50078fb00cf7 service nova] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Refreshing network info cache for port 85984b65-22df-43df-9e1b-1e7316bb64d1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 553.522919] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47fc98ac-618b-4b6f-bfd9-cc9bceb959be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.535654] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b2d49c-f96a-4b80-bf77-a2f740642700 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.563230] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c1fc621a-bc36-4d55-beec-cdc446bc8d06 could not be found. 
[ 553.563230] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 553.563230] env[62204]: INFO nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Took 0.04 seconds to destroy the instance on the hypervisor. [ 553.563534] env[62204]: DEBUG oslo.service.loopingcall [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 553.563936] env[62204]: DEBUG nova.compute.manager [-] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 553.564127] env[62204]: DEBUG nova.network.neutron [-] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 553.567891] env[62204]: DEBUG nova.network.neutron [req-546d2e0d-7861-4058-8bf4-b5bf21042d2b req-9f7b4736-d571-4a22-9e6a-921406a9b758 service nova] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.583865] env[62204]: DEBUG nova.network.neutron [-] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.644019] env[62204]: DEBUG nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 553.673276] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 4644dfab-0758-43e6-bbcc-9930f086a4e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 553.673276] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 62721b0d-0763-43ae-b221-271266bf8794 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 553.673276] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance d7f15c60-04ac-429e-b16f-8774f9a050b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 553.673276] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 6e999bd0-38be-42d5-b2b7-3f9196fb941e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 553.673489] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance cb218f34-dec8-46ae-8659-6c37df5d51b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 553.673489] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance c1fc621a-bc36-4d55-beec-cdc446bc8d06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 553.673489] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 6c63cc36-4f25-4196-9e74-50dcbefd37a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 553.673489] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 38104ca0-29bd-4d1e-b20c-47f76491ce32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 553.819025] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ac2e09-c702-10b4-0dc9-67b8c1f6abeb, 'name': SearchDatastore_Task, 'duration_secs': 0.020484} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.823233] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 553.823504] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 553.823710] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.826107] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Successfully created port: f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 553.832200] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5284ee4b-87e4-6574-f7cb-6b8658c97572, 'name': SearchDatastore_Task, 'duration_secs': 0.012538} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.832949] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b67c293-69ad-4e79-ac95-96ce06cb0b96 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.839161] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 553.839161] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520598ef-485b-29dd-d416-5f1a79480f8d" [ 553.839161] env[62204]: _type = "Task" [ 553.839161] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 553.847282] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520598ef-485b-29dd-d416-5f1a79480f8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.045252] env[62204]: DEBUG nova.network.neutron [req-eff305d6-114b-4f49-905a-22f92ae36aef req-2c7bbfac-50c7-4da5-b01e-50078fb00cf7 service nova] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.072395] env[62204]: DEBUG oslo_concurrency.lockutils [req-546d2e0d-7861-4058-8bf4-b5bf21042d2b req-9f7b4736-d571-4a22-9e6a-921406a9b758 service nova] Releasing lock "refresh_cache-cb218f34-dec8-46ae-8659-6c37df5d51b4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.072395] env[62204]: DEBUG nova.compute.manager [req-546d2e0d-7861-4058-8bf4-b5bf21042d2b req-9f7b4736-d571-4a22-9e6a-921406a9b758 service nova] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Received event network-vif-deleted-ac76cc56-d0e7-4296-8645-969dbb6f08ed {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 554.090812] env[62204]: DEBUG nova.network.neutron [-] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.140168] env[62204]: DEBUG nova.network.neutron [req-eff305d6-114b-4f49-905a-22f92ae36aef req-2c7bbfac-50c7-4da5-b01e-50078fb00cf7 service nova] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.180392] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Acquiring lock "7cccaaf2-f17d-426d-9340-e33260235706" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.180392] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Lock "7cccaaf2-f17d-426d-9340-e33260235706" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.181027] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance ac70a103-cb49-4cef-8069-dd0bb265633a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 554.285881] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "ce74983e-8347-425c-967a-6a78a7daa701" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.286162] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "ce74983e-8347-425c-967a-6a78a7daa701" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.351501] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520598ef-485b-29dd-d416-5f1a79480f8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009485} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.352057] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.352537] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 6c63cc36-4f25-4196-9e74-50dcbefd37a2/6c63cc36-4f25-4196-9e74-50dcbefd37a2.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 554.353061] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.353430] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 554.354118] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4030909f-c088-448a-8d03-6eeb0366c957 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
554.358356] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87e79218-a923-4dc0-994e-0d5adc613527 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.365658] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 554.365658] env[62204]: value = "task-1199393" [ 554.365658] env[62204]: _type = "Task" [ 554.365658] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.369569] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 554.369637] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 554.370645] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b07e76e-a068-4bc5-8ea2-6873a37a3254 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.377906] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199393, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.379558] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 554.379558] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c36b7-1307-015c-31c3-e21dac52b5ea" [ 554.379558] env[62204]: _type = "Task" [ 554.379558] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.387651] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c36b7-1307-015c-31c3-e21dac52b5ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.596236] env[62204]: INFO nova.compute.manager [-] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Took 1.03 seconds to deallocate network for instance. 
[ 554.597520] env[62204]: DEBUG nova.compute.claims [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 554.601024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.642889] env[62204]: DEBUG oslo_concurrency.lockutils [req-eff305d6-114b-4f49-905a-22f92ae36aef req-2c7bbfac-50c7-4da5-b01e-50078fb00cf7 service nova] Releasing lock "refresh_cache-c1fc621a-bc36-4d55-beec-cdc446bc8d06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.663257] env[62204]: DEBUG nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 554.686742] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance cab990d6-c8e5-49ce-8274-9c59904193ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 554.696382] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 554.696633] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 554.696783] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 554.697773] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 554.698015] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 554.698188] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 554.698408] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 554.698565] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 554.698731] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 554.699322] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 554.699526] env[62204]: DEBUG nova.virt.hardware [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 554.700423] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26735da6-4a30-46b7-a6fc-985f3cc28100 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.717037] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538292d7-2f51-4d36-92f3-e5e56c3de6b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.885542] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199393, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479476} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.885542] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 6c63cc36-4f25-4196-9e74-50dcbefd37a2/6c63cc36-4f25-4196-9e74-50dcbefd37a2.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 554.885542] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 554.887092] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7bf5f87-ca36-4374-bcf6-de5f229658f1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.896950] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c36b7-1307-015c-31c3-e21dac52b5ea, 'name': SearchDatastore_Task, 'duration_secs': 0.007997} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 554.899028] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 554.899028] env[62204]: value = "task-1199394" [ 554.899028] env[62204]: _type = "Task" [ 554.899028] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.899480] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-023a41db-ed38-4fb8-89ce-f14edb8485d7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.907773] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 554.907773] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522194c9-51b8-f93e-17c7-d5978bc8bac6" [ 554.907773] env[62204]: _type = "Task" [ 554.907773] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 554.911027] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199394, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 554.919282] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522194c9-51b8-f93e-17c7-d5978bc8bac6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.191337] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 111c0b93-2f02-4f30-9389-0b7f9b041ee8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.412228] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199394, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.138659} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.416117] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 555.419019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37cf95f-714f-49d2-9b79-8dc1f8dbeacb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.428874] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522194c9-51b8-f93e-17c7-d5978bc8bac6, 'name': SearchDatastore_Task, 'duration_secs': 0.02646} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 555.438705] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.439344] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 555.449360] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 6c63cc36-4f25-4196-9e74-50dcbefd37a2/6c63cc36-4f25-4196-9e74-50dcbefd37a2.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 555.449730] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a43d3b3-1fa3-476a-9868-713ea8a9c39c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.451890] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e63a036e-7865-4e85-bef8-5728deab2c58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.479243] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 555.479243] env[62204]: value = "task-1199395" [ 555.479243] env[62204]: _type = "Task" [ 555.479243] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.480955] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 555.480955] env[62204]: value = "task-1199396" [ 555.480955] env[62204]: _type = "Task" [ 555.480955] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 555.490486] env[62204]: DEBUG nova.compute.manager [req-651c2fde-8cf0-4487-97be-fff611bf9dd7 req-cf558eaf-1afc-44a0-8876-56c51a0c32e3 service nova] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Received event network-vif-deleted-85984b65-22df-43df-9e1b-1e7316bb64d1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 555.494590] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199395, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.497665] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.579777] env[62204]: ERROR nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2, please check neutron logs for more information. [ 555.579777] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 555.579777] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 555.579777] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 555.579777] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 555.579777] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 555.579777] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 555.579777] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 555.579777] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 555.579777] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 555.579777] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 555.579777] env[62204]: ERROR nova.compute.manager raise self.value [ 555.579777] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 555.579777] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 555.579777] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 555.579777] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 555.580335] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 555.580335] env[62204]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 555.580335] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2, please check neutron logs for more information. [ 555.580335] env[62204]: ERROR nova.compute.manager [ 555.580335] env[62204]: Traceback (most recent call last): [ 555.580335] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 555.580335] env[62204]: listener.cb(fileno) [ 555.580335] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 555.580335] env[62204]: result = function(*args, **kwargs) [ 555.580335] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 555.580335] env[62204]: return func(*args, **kwargs) [ 555.580335] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 555.580335] env[62204]: raise e [ 555.580335] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 555.580335] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 555.580335] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 555.580335] env[62204]: created_port_ids = self._update_ports_for_instance( [ 555.580335] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 555.580335] env[62204]: with excutils.save_and_reraise_exception(): [ 555.580335] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 555.580335] env[62204]: self.force_reraise() [ 555.580335] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 555.580335] env[62204]: raise self.value [ 555.580335] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 555.580335] env[62204]: updated_port = self._update_port( [ 555.580335] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 555.580335] env[62204]: _ensure_no_port_binding_failure(port) [ 555.580335] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 555.580335] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 555.581239] env[62204]: nova.exception.PortBindingFailed: Binding failed for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2, please check neutron logs for more information. [ 555.581239] env[62204]: Removing descriptor: 14 [ 555.581239] env[62204]: ERROR nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2, please check neutron logs for more information. 
[ 555.581239] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Traceback (most recent call last): [ 555.581239] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 555.581239] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] yield resources [ 555.581239] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 555.581239] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self.driver.spawn(context, instance, image_meta, [ 555.581239] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 555.581239] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 555.581239] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 555.581239] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] vm_ref = self.build_virtual_machine(instance, [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] vif_infos = vmwarevif.get_vif_info(self._session, [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] for vif in network_info: [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] return self._sync_wrapper(fn, *args, **kwargs) [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self.wait() [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self[:] = self._gt.wait() [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] return self._exit_event.wait() [ 555.581634] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 555.582059] env[62204]: ERROR 
nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] result = hub.switch() [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] return self.greenlet.switch() [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] result = function(*args, **kwargs) [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] return func(*args, **kwargs) [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] raise e [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] nwinfo = self.network_api.allocate_for_instance( [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 555.582059] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] created_port_ids = self._update_ports_for_instance( [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] with excutils.save_and_reraise_exception(): [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self.force_reraise() [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] raise self.value [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] updated_port = self._update_port( [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 555.582742] 
env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] _ensure_no_port_binding_failure(port) [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 555.582742] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] raise exception.PortBindingFailed(port_id=port['id']) [ 555.583235] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] nova.exception.PortBindingFailed: Binding failed for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2, please check neutron logs for more information. [ 555.583235] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] [ 555.583235] env[62204]: INFO nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Terminating instance [ 555.584515] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Acquiring lock "refresh_cache-38104ca0-29bd-4d1e-b20c-47f76491ce32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 555.584797] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Acquired lock "refresh_cache-38104ca0-29bd-4d1e-b20c-47f76491ce32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 555.585032] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 555.694455] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 6c675e27-0de4-46bc-8017-5ee43e2efa5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 555.998964] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199396, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 555.999658] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199395, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.101496] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.200158] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.201806] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 4a2b5dbe-ed48-40b6-ba72-a06b14e31696 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.498487] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199396, 'name': ReconfigVM_Task, 'duration_secs': 0.592997} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.501758] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 6c63cc36-4f25-4196-9e74-50dcbefd37a2/6c63cc36-4f25-4196-9e74-50dcbefd37a2.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 556.503322] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533208} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 556.503563] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-deccbc23-4ad1-4d7a-8e79-ea845f5f9e4f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.505272] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 556.505624] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 556.505799] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ea01f66-1fb0-4ee6-869d-43e67e50d246 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.517202] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 556.517202] env[62204]: value = "task-1199398" [ 556.517202] env[62204]: _type = "Task" [ 556.517202] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.518534] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 556.518534] env[62204]: value = "task-1199397" [ 556.518534] env[62204]: _type = "Task" [ 556.518534] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 556.530150] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199398, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.532987] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199397, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 556.705099] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Releasing lock "refresh_cache-38104ca0-29bd-4d1e-b20c-47f76491ce32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 556.705505] env[62204]: DEBUG nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 556.706020] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 556.706460] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 571b574b-27f2-4e95-9309-fd3097fb4f64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 556.709759] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78954797-4330-49c2-9a47-deb043337d17 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.719530] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf44710-8984-436e-8823-be67fca433ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.746488] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 38104ca0-29bd-4d1e-b20c-47f76491ce32 could not be found. [ 556.746488] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 556.746488] env[62204]: INFO nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Took 0.04 seconds to destroy the instance on the hypervisor. 
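The "Acquiring lock" / "Acquired lock" / "Releasing lock" entries around the refresh_cache-38104ca0-… lines above are emitted by oslo.concurrency's lockutils while the instance network cache is rebuilt. A minimal sketch of that locking pattern, assuming only the public lockutils.lock() context manager; the helper name and callback are illustrative, not Nova's actual code:

    # Sketch only: the lockutils pattern behind the "Acquiring lock" /
    # "Releasing lock" DEBUG entries seen in this log.
    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, fetch_nw_info):
        # lockutils.lock() logs acquisition and release at DEBUG level,
        # which is what produces the refresh_cache-<uuid> lock lines.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            # Placeholder for the Neutron lookup that repopulates the
            # instance_info_cache while the lock is held.
            return fetch_nw_info(instance_uuid)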
[ 556.746488] env[62204]: DEBUG oslo.service.loopingcall [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 556.746991] env[62204]: DEBUG nova.compute.manager [-] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 556.747317] env[62204]: DEBUG nova.network.neutron [-] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 556.774338] env[62204]: DEBUG nova.network.neutron [-] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.033137] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199398, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097165} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.036102] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 557.036413] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199397, 'name': Rename_Task, 'duration_secs': 0.137983} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.037075] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e8b5cb-e741-4a14-828a-efb779034bf0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.039637] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 557.039725] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e995d983-9933-4249-a299-a1f262a57ac7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.057688] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 557.058947] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cb5b100-894c-4285-9183-55c694901f8e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.073383] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 557.073383] env[62204]: value = "task-1199399" [ 557.073383] env[62204]: _type = "Task" [ 557.073383] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.078521] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 557.078521] env[62204]: value = "task-1199400" [ 557.078521] env[62204]: _type = "Task" [ 557.078521] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.081964] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199399, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.089943] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199400, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.213122] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 72514005-1023-4db6-9e51-9b0855083411 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.277872] env[62204]: DEBUG nova.network.neutron [-] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.587747] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199399, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.596329] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199400, 'name': ReconfigVM_Task, 'duration_secs': 0.258134} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 557.597028] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5/4644dfab-0758-43e6-bbcc-9930f086a4e5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 557.597254] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4420b73-d409-45fe-aea0-1d79635a7141 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.604076] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 557.604076] env[62204]: value = "task-1199401" [ 557.604076] env[62204]: _type = "Task" [ 557.604076] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 557.611889] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199401, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 557.716912] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 47409cd0-db33-4a94-b806-1799a6f7e98f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 557.781212] env[62204]: INFO nova.compute.manager [-] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Took 1.03 seconds to deallocate network for instance. [ 557.787698] env[62204]: DEBUG nova.compute.claims [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 557.787804] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.892814] env[62204]: DEBUG nova.compute.manager [req-0bd550c8-1119-4231-adc9-d56ea88a363d req-38e73083-7a95-4713-87c8-1d395880d054 service nova] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Received event network-changed-f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 557.894476] env[62204]: DEBUG nova.compute.manager [req-0bd550c8-1119-4231-adc9-d56ea88a363d req-38e73083-7a95-4713-87c8-1d395880d054 service nova] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Refreshing instance network info cache due to event network-changed-f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 557.894887] env[62204]: DEBUG oslo_concurrency.lockutils [req-0bd550c8-1119-4231-adc9-d56ea88a363d req-38e73083-7a95-4713-87c8-1d395880d054 service nova] Acquiring lock "refresh_cache-38104ca0-29bd-4d1e-b20c-47f76491ce32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.895042] env[62204]: DEBUG oslo_concurrency.lockutils [req-0bd550c8-1119-4231-adc9-d56ea88a363d req-38e73083-7a95-4713-87c8-1d395880d054 service nova] Acquired lock "refresh_cache-38104ca0-29bd-4d1e-b20c-47f76491ce32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.895288] env[62204]: DEBUG nova.network.neutron [req-0bd550c8-1119-4231-adc9-d56ea88a363d req-38e73083-7a95-4713-87c8-1d395880d054 service nova] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Refreshing network info cache for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 558.017770] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Acquiring lock "0ab619ea-755b-4d71-9c12-0eeda0b42a39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.018333] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Lock "0ab619ea-755b-4d71-9c12-0eeda0b42a39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.090393] env[62204]: DEBUG oslo_vmware.api [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199399, 'name': PowerOnVM_Task, 'duration_secs': 0.544909} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.090695] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 558.090842] env[62204]: INFO nova.compute.manager [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Took 5.94 seconds to spawn the instance on the hypervisor. 
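The task blocks above ("Waiting for the task: (returnval){ value = "task-1199399" … }"), the "progress is N%" polling lines, and the final duration_secs come from oslo.vmware's task-wait helper. A rough sketch of the invoke-then-wait flow a caller drives through VMwareAPISession; only invoke_api() and wait_for_task() are assumed from oslo.vmware's public API, and the vm_ref handling is illustrative:

    # Sketch only: the invoke-then-wait pattern behind the PowerOnVM_Task,
    # Rename_Task and ReconfigVM_Task entries in this log.
    from oslo_vmware import api as vmware_api

    def power_on(session: vmware_api.VMwareAPISession, vm_ref):
        # Starts the vCenter task; corresponds to the
        # "Invoking VirtualMachine.PowerOnVM_Task with opID=..." lines.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task (the "progress is N%" lines) and
        # returns its result once vCenter reports successful completion.
        return session.wait_for_task(task)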
[ 558.091032] env[62204]: DEBUG nova.compute.manager [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 558.091791] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b15094-18cd-4ccb-9669-7851f49532fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.113138] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199401, 'name': Rename_Task, 'duration_secs': 0.146643} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.113408] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 558.113643] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ddd70e4-8549-4105-83c3-fa2a5865951b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.119589] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Waiting for the task: (returnval){ [ 558.119589] env[62204]: value = "task-1199402" [ 558.119589] env[62204]: _type = "Task" [ 558.119589] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.129266] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199402, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 558.220212] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance d52bfb49-beb0-4bfe-b3bb-45132c210065 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 558.429200] env[62204]: DEBUG nova.network.neutron [req-0bd550c8-1119-4231-adc9-d56ea88a363d req-38e73083-7a95-4713-87c8-1d395880d054 service nova] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.562052] env[62204]: DEBUG nova.network.neutron [req-0bd550c8-1119-4231-adc9-d56ea88a363d req-38e73083-7a95-4713-87c8-1d395880d054 service nova] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.615996] env[62204]: INFO nova.compute.manager [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Took 24.71 seconds to build instance. [ 558.639054] env[62204]: DEBUG oslo_vmware.api [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Task: {'id': task-1199402, 'name': PowerOnVM_Task, 'duration_secs': 0.446784} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 558.639407] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 558.639553] env[62204]: DEBUG nova.compute.manager [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 558.640373] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1176d143-3e8a-436b-aba9-8c6c3558fdaf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.723441] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 559.068756] env[62204]: DEBUG oslo_concurrency.lockutils [req-0bd550c8-1119-4231-adc9-d56ea88a363d req-38e73083-7a95-4713-87c8-1d395880d054 service nova] Releasing lock "refresh_cache-38104ca0-29bd-4d1e-b20c-47f76491ce32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.068756] env[62204]: DEBUG nova.compute.manager [req-0bd550c8-1119-4231-adc9-d56ea88a363d req-38e73083-7a95-4713-87c8-1d395880d054 service nova] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Received event network-vif-deleted-f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 559.120316] env[62204]: DEBUG oslo_concurrency.lockutils [None req-47af61b1-ec73-4b05-b47f-0686b1839de7 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "6c63cc36-4f25-4196-9e74-50dcbefd37a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.048s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.159514] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.233254] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 292b9c31-2ea1-4b28-8b60-79c6c80e1531 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 559.356123] env[62204]: DEBUG nova.compute.manager [None req-db61f5ac-161e-4562-bf4b-1ead64f80a3a tempest-ServerDiagnosticsV248Test-1019490833 tempest-ServerDiagnosticsV248Test-1019490833-project-admin] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 559.357274] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d3e255-9cdb-4174-bffa-96030966e54e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.365176] env[62204]: INFO nova.compute.manager [None req-db61f5ac-161e-4562-bf4b-1ead64f80a3a tempest-ServerDiagnosticsV248Test-1019490833 tempest-ServerDiagnosticsV248Test-1019490833-project-admin] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Retrieving diagnostics [ 559.365813] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670e70b3-398b-4062-b9ba-5a05b50d69ee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.623975] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 559.737557] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 25e4a40e-c5a6-47f5-9e53-00d3073252fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 560.150918] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.242560] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance f5a11b74-e1fe-44c0-bccb-5817bc582608 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 560.550140] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "4644dfab-0758-43e6-bbcc-9930f086a4e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.550140] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "4644dfab-0758-43e6-bbcc-9930f086a4e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.550140] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "4644dfab-0758-43e6-bbcc-9930f086a4e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 560.550140] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "4644dfab-0758-43e6-bbcc-9930f086a4e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 560.550752] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "4644dfab-0758-43e6-bbcc-9930f086a4e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 560.552103] env[62204]: INFO nova.compute.manager [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Terminating instance [ 560.555260] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "refresh_cache-4644dfab-0758-43e6-bbcc-9930f086a4e5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.555342] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquired lock "refresh_cache-4644dfab-0758-43e6-bbcc-9930f086a4e5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.555507] env[62204]: DEBUG nova.network.neutron [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 
tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 560.744927] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 5e264b99-8025-471a-bc6b-238f1bca054c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 561.084210] env[62204]: DEBUG nova.network.neutron [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.185103] env[62204]: DEBUG nova.network.neutron [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.251704] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 9e573093-6434-452d-8025-4688d9f78c53 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 561.691024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Releasing lock "refresh_cache-4644dfab-0758-43e6-bbcc-9930f086a4e5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.691024] env[62204]: DEBUG nova.compute.manager [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 561.691024] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 561.691024] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1553fe5-5140-4489-96f5-9326acdaec5b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.698676] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 561.699027] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-110fa1e3-7c73-41ee-be5b-06e8cea3bcd6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.705625] env[62204]: DEBUG oslo_vmware.api [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 561.705625] env[62204]: value = "task-1199403" [ 561.705625] env[62204]: _type = "Task" [ 561.705625] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 561.714826] env[62204]: DEBUG oslo_vmware.api [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199403, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 561.756093] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 63ed8992-0e8f-41ca-8b28-c0b2538ff61c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 562.216281] env[62204]: DEBUG oslo_vmware.api [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199403, 'name': PowerOffVM_Task, 'duration_secs': 0.131126} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.216614] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 562.216806] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 562.217101] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63ea2932-405e-4699-9e76-2f870b54d1c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.243257] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 562.243257] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 562.243257] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleting the datastore file [datastore2] 4644dfab-0758-43e6-bbcc-9930f086a4e5 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 562.243257] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3de548e7-005f-4712-ae5b-a37d74d9104b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.252649] env[62204]: DEBUG oslo_vmware.api [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for the task: (returnval){ [ 562.252649] env[62204]: value = "task-1199405" [ 562.252649] env[62204]: _type = "Task" [ 562.252649] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 562.261200] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 7c21539c-35fa-4f58-beb0-e965ffaf79af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 562.268243] env[62204]: DEBUG oslo_vmware.api [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199405, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 562.761770] env[62204]: DEBUG oslo_vmware.api [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Task: {'id': task-1199405, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.432234} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 562.762201] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 562.762440] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 562.762648] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 562.762852] env[62204]: INFO nova.compute.manager [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Took 1.07 seconds to destroy the instance on the hypervisor. [ 562.763172] env[62204]: DEBUG oslo.service.loopingcall [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 562.763404] env[62204]: DEBUG nova.compute.manager [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 562.763525] env[62204]: DEBUG nova.network.neutron [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 562.765679] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 39d5f95c-7c98-4263-a46e-948d2e3d31ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 562.807225] env[62204]: DEBUG nova.network.neutron [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.273742] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 563.276056] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 563.276056] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 563.319255] env[62204]: DEBUG nova.network.neutron [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.721421] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e32314-3694-4e6a-b056-33c5f458f9d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.728963] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8d73a9-2e2d-403b-aaba-85191cbf6101 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.760704] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc4dcb5-cbfd-4510-ab6b-5aa4213b44af {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.769102] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8292f3a4-965f-4485-9218-c1e8f3065468 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.783357] env[62204]: DEBUG nova.compute.provider_tree [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.820862] env[62204]: INFO nova.compute.manager [-] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Took 1.06 seconds to deallocate network for instance. 
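The "Final resource view" line above and the inventory dump in the next entry pair physical totals with reserved amounts and allocation ratios; Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio. A small worked sketch using the figures reported in this log (the helper is illustrative; the numbers are copied from the surrounding entries):

    # Sketch only: turning the reported inventory into schedulable capacity.
    def capacity(total, reserved, allocation_ratio):
        # Placement's usable capacity per resource class.
        return int((total - reserved) * allocation_ratio)

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        print(rc, capacity(inv['total'], inv['reserved'], inv['allocation_ratio']))
    # -> VCPU 192, MEMORY_MB 196078, DISK_GB 400, against the 8 VCPU /
    #    2048 MB / 8 GB currently in use per the "Final resource view" entry.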
[ 564.286320] env[62204]: DEBUG nova.scheduler.client.report [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 564.331192] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 564.791703] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 564.791703] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.160s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.791938] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.594s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.795855] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.795855] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Getting list of instances from cluster (obj){ [ 564.795855] env[62204]: value = "domain-c8" [ 564.795855] env[62204]: _type = "ClusterComputeResource" [ 564.795855] env[62204]: } {{(pid=62204) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 564.800268] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08107715-144f-4950-85d8-4af3ae98dd0b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.818407] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Got total of 1 instances {{(pid=62204) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 565.697107] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66c44ad6-955b-4bcb-a863-3ee08fc39071 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.707641] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705ac2c0-0e8d-419e-84aa-bdc66f22ac96 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.739144] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60732bc2-d3eb-4a85-a901-000d1a5f6509 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.747214] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f841da9-36b4-432a-8991-fddc1d2a81a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.762037] env[62204]: DEBUG nova.compute.provider_tree [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 565.897424] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "bcb11a72-4394-42a2-9a9f-295adc1abcd0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.897649] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "bcb11a72-4394-42a2-9a9f-295adc1abcd0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.264946] env[62204]: DEBUG nova.scheduler.client.report [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.771938] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.980s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.772672] env[62204]: ERROR nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 
tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 423ba96a-ddc9-4e32-a315-359fa67c151f, please check neutron logs for more information. [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] Traceback (most recent call last): [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self.driver.spawn(context, instance, image_meta, [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self._vmops.spawn(context, instance, image_meta, injected_files, [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] vm_ref = self.build_virtual_machine(instance, [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] vif_infos = vmwarevif.get_vif_info(self._session, [ 566.772672] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] for vif in network_info: [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] return self._sync_wrapper(fn, *args, **kwargs) [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self.wait() [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self[:] = self._gt.wait() [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] return self._exit_event.wait() [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 566.773069] env[62204]: ERROR 
nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] result = hub.switch() [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 566.773069] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] return self.greenlet.switch() [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] result = function(*args, **kwargs) [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] return func(*args, **kwargs) [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] raise e [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] nwinfo = self.network_api.allocate_for_instance( [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] created_port_ids = self._update_ports_for_instance( [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] with excutils.save_and_reraise_exception(): [ 566.773504] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] self.force_reraise() [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] raise self.value [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] updated_port = self._update_port( [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 566.773890] 
env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] _ensure_no_port_binding_failure(port) [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] raise exception.PortBindingFailed(port_id=port['id']) [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] nova.exception.PortBindingFailed: Binding failed for port 423ba96a-ddc9-4e32-a315-359fa67c151f, please check neutron logs for more information. [ 566.773890] env[62204]: ERROR nova.compute.manager [instance: 62721b0d-0763-43ae-b221-271266bf8794] [ 566.774264] env[62204]: DEBUG nova.compute.utils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Binding failed for port 423ba96a-ddc9-4e32-a315-359fa67c151f, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 566.774813] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.091s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.776262] env[62204]: INFO nova.compute.claims [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 566.778966] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Build of instance 62721b0d-0763-43ae-b221-271266bf8794 was re-scheduled: Binding failed for port 423ba96a-ddc9-4e32-a315-359fa67c151f, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 566.779861] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 566.779861] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquiring lock "refresh_cache-62721b0d-0763-43ae-b221-271266bf8794" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 566.779861] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Acquired lock "refresh_cache-62721b0d-0763-43ae-b221-271266bf8794" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.780035] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 567.299365] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 567.394332] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 567.898763] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Releasing lock "refresh_cache-62721b0d-0763-43ae-b221-271266bf8794" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 567.898763] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 567.898763] env[62204]: DEBUG nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 567.898763] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 567.934238] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.189914] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef3b088-ab89-4719-82b6-24d92700d0ef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.197913] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331d2599-91f7-4af3-8696-b29fe6d814ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.230291] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d6ba94-8835-4240-9943-d5cbbe772c2d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.238064] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5daccf-44a8-4252-b1ec-6e6bd51ae4e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.251905] env[62204]: DEBUG nova.compute.provider_tree [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 568.437127] env[62204]: DEBUG nova.network.neutron [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.755836] env[62204]: DEBUG nova.scheduler.client.report [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 568.940610] env[62204]: INFO nova.compute.manager [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] [instance: 62721b0d-0763-43ae-b221-271266bf8794] Took 1.04 seconds to deallocate network for instance. [ 569.262610] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.263405] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 569.267312] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.478s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.773457] env[62204]: DEBUG nova.compute.utils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 569.778328] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 569.778328] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 569.843842] env[62204]: DEBUG nova.policy [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '478b22c814424575af79a8af808398a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81dc15a8604e4900845b79c75cc5ef16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 569.941865] env[62204]: DEBUG nova.compute.manager [None req-cb9c93f7-cede-4336-b13f-64c9d319f2ee tempest-ServerDiagnosticsV248Test-1019490833 tempest-ServerDiagnosticsV248Test-1019490833-project-admin] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 569.942980] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676242a5-87ba-4728-83e3-62351c67934c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.955270] env[62204]: INFO nova.compute.manager [None req-cb9c93f7-cede-4336-b13f-64c9d319f2ee tempest-ServerDiagnosticsV248Test-1019490833 tempest-ServerDiagnosticsV248Test-1019490833-project-admin] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Retrieving diagnostics [ 569.956161] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd94b147-86fe-44e5-92ed-44d7cbaed02d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.990564] env[62204]: INFO nova.scheduler.client.report [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Deleted allocations for instance 62721b0d-0763-43ae-b221-271266bf8794 [ 570.237355] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1d3d28-f9a5-4978-9f26-a8cd78c42dbf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.244803] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8755fe1f-99e5-4b25-a4f3-830b8fc82eb0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.278388] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554a3d94-18a7-4f2d-b737-ef01a1c638ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.281182] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 
tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 570.288925] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f7c19a-a652-46b8-a767-a902e59b6af7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.302489] env[62204]: DEBUG nova.compute.provider_tree [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 570.411767] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Successfully created port: bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 570.500638] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4b386bbc-f52c-40cf-b2d5-3dc4fc3a31f5 tempest-ServersAdminTestJSON-426882600 tempest-ServersAdminTestJSON-426882600-project-member] Lock "62721b0d-0763-43ae-b221-271266bf8794" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.207s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.805874] env[62204]: DEBUG nova.scheduler.client.report [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 571.003528] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 571.155134] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquiring lock "6c63cc36-4f25-4196-9e74-50dcbefd37a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.155440] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "6c63cc36-4f25-4196-9e74-50dcbefd37a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.155652] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquiring lock "6c63cc36-4f25-4196-9e74-50dcbefd37a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.155831] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "6c63cc36-4f25-4196-9e74-50dcbefd37a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.155998] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "6c63cc36-4f25-4196-9e74-50dcbefd37a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.159321] env[62204]: INFO nova.compute.manager [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Terminating instance [ 571.161401] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquiring lock "refresh_cache-6c63cc36-4f25-4196-9e74-50dcbefd37a2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.161572] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquired lock "refresh_cache-6c63cc36-4f25-4196-9e74-50dcbefd37a2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.161862] env[62204]: DEBUG nova.network.neutron [None 
req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 571.292037] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 571.311998] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.045s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 571.312887] env[62204]: ERROR nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e854ad35-bc66-487f-aa75-3e582f8b74c2, please check neutron logs for more information. [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Traceback (most recent call last): [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self.driver.spawn(context, instance, image_meta, [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] vm_ref = self.build_virtual_machine(instance, [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] vif_infos = vmwarevif.get_vif_info(self._session, [ 571.312887] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] for vif in network_info: [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 571.313376] env[62204]: ERROR 
nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] return self._sync_wrapper(fn, *args, **kwargs) [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self.wait() [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self[:] = self._gt.wait() [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] return self._exit_event.wait() [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] result = hub.switch() [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 571.313376] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] return self.greenlet.switch() [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] result = function(*args, **kwargs) [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] return func(*args, **kwargs) [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] raise e [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] nwinfo = self.network_api.allocate_for_instance( [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] created_port_ids = self._update_ports_for_instance( [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 571.313922] env[62204]: ERROR nova.compute.manager 
[instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] with excutils.save_and_reraise_exception(): [ 571.313922] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] self.force_reraise() [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] raise self.value [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] updated_port = self._update_port( [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] _ensure_no_port_binding_failure(port) [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] raise exception.PortBindingFailed(port_id=port['id']) [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] nova.exception.PortBindingFailed: Binding failed for port e854ad35-bc66-487f-aa75-3e582f8b74c2, please check neutron logs for more information. [ 571.314341] env[62204]: ERROR nova.compute.manager [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] [ 571.314702] env[62204]: DEBUG nova.compute.utils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Binding failed for port e854ad35-bc66-487f-aa75-3e582f8b74c2, please check neutron logs for more information. 
{{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 571.314702] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.715s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.316491] env[62204]: INFO nova.compute.claims [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 571.325211] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Build of instance d7f15c60-04ac-429e-b16f-8774f9a050b8 was re-scheduled: Binding failed for port e854ad35-bc66-487f-aa75-3e582f8b74c2, please check neutron logs for more information. {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 571.325937] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 571.326306] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Acquiring lock "refresh_cache-d7f15c60-04ac-429e-b16f-8774f9a050b8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.326560] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Acquired lock "refresh_cache-d7f15c60-04ac-429e-b16f-8774f9a050b8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.326833] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 571.344391] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 571.344647] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 571.344801] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 571.344977] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 571.345129] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 571.345271] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 571.345488] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 571.345641] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 571.345830] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 571.345966] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 571.346191] env[62204]: DEBUG nova.virt.hardware [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 571.347360] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac1d793-ba76-4ce9-981f-64dc406590a5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.357150] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6c1c86-fc37-40c3-acba-f5986dc192db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.530649] env[62204]: DEBUG nova.compute.manager [req-2659a995-fe4a-4c09-9919-5e669d239376 req-269f3f56-935a-4205-a1dd-e7bf3e495cb4 service nova] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Received event network-changed-bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 571.530854] env[62204]: DEBUG nova.compute.manager [req-2659a995-fe4a-4c09-9919-5e669d239376 req-269f3f56-935a-4205-a1dd-e7bf3e495cb4 service nova] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Refreshing instance network info cache due to event network-changed-bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 571.531069] env[62204]: DEBUG oslo_concurrency.lockutils [req-2659a995-fe4a-4c09-9919-5e669d239376 req-269f3f56-935a-4205-a1dd-e7bf3e495cb4 service nova] Acquiring lock "refresh_cache-ac70a103-cb49-4cef-8069-dd0bb265633a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.531244] env[62204]: DEBUG oslo_concurrency.lockutils [req-2659a995-fe4a-4c09-9919-5e669d239376 req-269f3f56-935a-4205-a1dd-e7bf3e495cb4 service nova] Acquired lock "refresh_cache-ac70a103-cb49-4cef-8069-dd0bb265633a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.531407] env[62204]: DEBUG nova.network.neutron [req-2659a995-fe4a-4c09-9919-5e669d239376 req-269f3f56-935a-4205-a1dd-e7bf3e495cb4 service nova] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Refreshing network info cache for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 571.533665] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 571.650259] env[62204]: ERROR nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48, please check neutron logs for more information. 
[ 571.650259] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 571.650259] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.650259] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 571.650259] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 571.650259] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 571.650259] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 571.650259] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 571.650259] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.650259] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 571.650259] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.650259] env[62204]: ERROR nova.compute.manager raise self.value [ 571.650259] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 571.650259] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 571.650259] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.650259] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 571.650813] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.650813] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 571.650813] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48, please check neutron logs for more information. 
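Note on the repeated PortBindingFailed reports above and below: they are one failure surfacing at several layers. Network allocation runs in a separate eventlet greenthread (_allocate_network_async in nova/compute/manager.py), and the exception is re-raised when the VMware driver iterates the async network_info in get_vif_info, so the build of instance ac70a103-cb49-4cef-8069-dd0bb265633a aborts before any VM is created. The traceback names _ensure_no_port_binding_failure in nova/network/neutron.py, which raises when Neutron hands back the port with a failed binding. The lines below are a minimal, hypothetical paraphrase of that check plus a way to inspect the port named in the error; they are an illustration based on this traceback, not a copy of the Nova source, and the CLI command is an assumption about an installed python-openstackclient.

    # Sketch only: paraphrase of the check that produced the error above.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports for a failed binding

    class PortBindingFailed(Exception):
        pass

    def ensure_no_port_binding_failure(port):
        # 'binding:vif_type' is the Neutron port attribute carrying the binding result.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(
                "Binding failed for port %s, please check neutron logs" % port['id'])

    # Example matching the log: a port whose binding failed trips the check.
    port = {'id': 'bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # same message that appears in the tracebacks here

    # To look at the real port from this log (command assumed, not taken from the log):
    #   openstack port show bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48 -c status -c binding_vif_type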
[ 571.650813] env[62204]: ERROR nova.compute.manager [ 571.650813] env[62204]: Traceback (most recent call last): [ 571.650813] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 571.650813] env[62204]: listener.cb(fileno) [ 571.650813] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.650813] env[62204]: result = function(*args, **kwargs) [ 571.650813] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 571.650813] env[62204]: return func(*args, **kwargs) [ 571.650813] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.650813] env[62204]: raise e [ 571.650813] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.650813] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 571.650813] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 571.650813] env[62204]: created_port_ids = self._update_ports_for_instance( [ 571.650813] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 571.650813] env[62204]: with excutils.save_and_reraise_exception(): [ 571.650813] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.650813] env[62204]: self.force_reraise() [ 571.650813] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.650813] env[62204]: raise self.value [ 571.650813] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 571.650813] env[62204]: updated_port = self._update_port( [ 571.650813] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.650813] env[62204]: _ensure_no_port_binding_failure(port) [ 571.650813] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.650813] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 571.651755] env[62204]: nova.exception.PortBindingFailed: Binding failed for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48, please check neutron logs for more information. [ 571.651755] env[62204]: Removing descriptor: 14 [ 571.651755] env[62204]: ERROR nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48, please check neutron logs for more information. 
[ 571.651755] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Traceback (most recent call last): [ 571.651755] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 571.651755] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] yield resources [ 571.651755] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 571.651755] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self.driver.spawn(context, instance, image_meta, [ 571.651755] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 571.651755] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 571.651755] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 571.651755] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] vm_ref = self.build_virtual_machine(instance, [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] vif_infos = vmwarevif.get_vif_info(self._session, [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] for vif in network_info: [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] return self._sync_wrapper(fn, *args, **kwargs) [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self.wait() [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self[:] = self._gt.wait() [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] return self._exit_event.wait() [ 571.652155] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 571.652581] env[62204]: ERROR 
nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] result = hub.switch() [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] return self.greenlet.switch() [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] result = function(*args, **kwargs) [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] return func(*args, **kwargs) [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] raise e [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] nwinfo = self.network_api.allocate_for_instance( [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 571.652581] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] created_port_ids = self._update_ports_for_instance( [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] with excutils.save_and_reraise_exception(): [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self.force_reraise() [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] raise self.value [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] updated_port = self._update_port( [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 571.653008] 
env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] _ensure_no_port_binding_failure(port) [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 571.653008] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] raise exception.PortBindingFailed(port_id=port['id']) [ 571.653530] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] nova.exception.PortBindingFailed: Binding failed for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48, please check neutron logs for more information. [ 571.653530] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] [ 571.653530] env[62204]: INFO nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Terminating instance [ 571.654437] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-ac70a103-cb49-4cef-8069-dd0bb265633a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 571.684339] env[62204]: DEBUG nova.network.neutron [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.740938] env[62204]: DEBUG nova.network.neutron [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.858858] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.949673] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.051993] env[62204]: DEBUG nova.network.neutron [req-2659a995-fe4a-4c09-9919-5e669d239376 req-269f3f56-935a-4205-a1dd-e7bf3e495cb4 service nova] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.232352] env[62204]: DEBUG nova.network.neutron [req-2659a995-fe4a-4c09-9919-5e669d239376 req-269f3f56-935a-4205-a1dd-e7bf3e495cb4 service nova] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.243728] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Releasing lock "refresh_cache-6c63cc36-4f25-4196-9e74-50dcbefd37a2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.246022] env[62204]: DEBUG nova.compute.manager [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 572.246022] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 572.246022] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db186e2-2e66-453e-a3a8-e0c5bc12f548 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.255544] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 572.255936] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-caa5f0d8-6187-4014-bfe0-07d30bc9d998 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.265018] env[62204]: DEBUG oslo_vmware.api [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 572.265018] env[62204]: value = "task-1199406" [ 572.265018] env[62204]: _type = "Task" [ 572.265018] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.272894] env[62204]: DEBUG oslo_vmware.api [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199406, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.454576] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Releasing lock "refresh_cache-d7f15c60-04ac-429e-b16f-8774f9a050b8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.454925] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 572.455274] env[62204]: DEBUG nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 572.456440] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 572.485389] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 572.734964] env[62204]: DEBUG oslo_concurrency.lockutils [req-2659a995-fe4a-4c09-9919-5e669d239376 req-269f3f56-935a-4205-a1dd-e7bf3e495cb4 service nova] Releasing lock "refresh_cache-ac70a103-cb49-4cef-8069-dd0bb265633a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.735472] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-ac70a103-cb49-4cef-8069-dd0bb265633a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.735656] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 572.774386] env[62204]: DEBUG oslo_vmware.api [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199406, 'name': PowerOffVM_Task, 'duration_secs': 0.194991} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.774558] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 572.774767] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 572.774957] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-724005cd-e25e-487e-8bc8-3aca5d5f76a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.778407] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38df9130-7614-4559-bf73-e934a0f478c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.786665] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db8ade2-2d73-4e0a-ba34-a65c6187973d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.817605] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875f9449-79e3-4648-9a61-37c49422ac2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.820951] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 572.820951] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 572.820951] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Deleting the datastore file [datastore2] 6c63cc36-4f25-4196-9e74-50dcbefd37a2 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 572.820951] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe057057-72a4-40e4-b1a7-b1bf6206d9c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.829089] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda7ef4a-233a-4c3c-91f3-5aee4ac6c20a {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.832883] env[62204]: DEBUG oslo_vmware.api [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for the task: (returnval){ [ 572.832883] env[62204]: value = "task-1199408" [ 572.832883] env[62204]: _type = "Task" [ 572.832883] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.844598] env[62204]: DEBUG nova.compute.provider_tree [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.850139] env[62204]: DEBUG oslo_vmware.api [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199408, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.987862] env[62204]: DEBUG nova.network.neutron [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.269623] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.343743] env[62204]: DEBUG oslo_vmware.api [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Task: {'id': task-1199408, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093333} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.347020] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 573.347020] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 573.347020] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 573.347020] env[62204]: INFO nova.compute.manager [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Took 1.10 seconds to destroy the instance on the hypervisor. [ 573.347020] env[62204]: DEBUG oslo.service.loopingcall [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 573.347364] env[62204]: DEBUG nova.compute.manager [-] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 573.347364] env[62204]: DEBUG nova.network.neutron [-] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 573.347892] env[62204]: DEBUG nova.scheduler.client.report [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 573.379838] env[62204]: DEBUG nova.network.neutron [-] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 573.466189] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.492615] env[62204]: INFO nova.compute.manager [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] [instance: d7f15c60-04ac-429e-b16f-8774f9a050b8] Took 1.04 seconds to deallocate network for instance. [ 573.577413] env[62204]: DEBUG nova.compute.manager [req-0439d0cb-06e4-4c16-b29e-5700a655aecb req-63535e4c-c1f3-4df2-973e-c2a52f4f0e0e service nova] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Received event network-vif-deleted-bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 573.853914] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 573.855031] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 573.857080] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.373s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 573.857285] env[62204]: DEBUG nova.objects.instance [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62204) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 573.883525] env[62204]: DEBUG nova.network.neutron [-] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.968492] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-ac70a103-cb49-4cef-8069-dd0bb265633a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 573.968915] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 573.969126] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 573.969425] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa1910cc-63fb-4927-9723-fe5ddc9df2e7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.978136] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c21699-6249-49bc-9070-5a7b16ece54e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.002362] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ac70a103-cb49-4cef-8069-dd0bb265633a could not be found. 
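The PortBindingFailed tracebacks above all terminate in _ensure_no_port_binding_failure (nova/network/neutron.py:294 in this deployment). A minimal sketch of that check is below; the binding:vif_type comparison and the 'binding_failed' sentinel are assumptions about how Neutron reports the failed bind, since only the resulting raise appears in the traceback.

    # Sketch of the check named in the tracebacks above. The binding:vif_type
    # comparison and the 'binding_failed' value are assumed; the log only shows
    # the resulting raise and its message.
    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed with the logged message."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # 'port' is the dict Neutron returns for the updated port.
        if port.get('binding:vif_type') == 'binding_failed':
            # Propagates up through _update_port, _update_ports_for_instance and
            # allocate_for_instance (per the traceback), aborting the spawn.
            raise PortBindingFailed(port_id=port['id'])

In this run port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48 never received a working binding, which is why instance ac70a103-cb49-4cef-8069-dd0bb265633a was terminated above and is reported destroyed just below.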
[ 574.002897] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 574.002897] env[62204]: INFO nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 574.003044] env[62204]: DEBUG oslo.service.loopingcall [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 574.003515] env[62204]: DEBUG nova.compute.manager [-] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 574.003613] env[62204]: DEBUG nova.network.neutron [-] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 574.018822] env[62204]: DEBUG nova.network.neutron [-] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 574.362050] env[62204]: DEBUG nova.compute.utils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 574.366168] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 574.366342] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 574.387641] env[62204]: INFO nova.compute.manager [-] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Took 1.04 seconds to deallocate network for instance. 
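The "Waiting for function ... _deallocate_network_with_retries to return" DEBUG lines come from oslo.service's loopingcall module (the path is visible in the log). The exact loop class and retry policy Nova applies here are not shown; the sketch below is a generic FixedIntervalLoopingCall example, with the callable body and interval purely illustrative, to show the call-until-done-then-wait pattern.

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _deallocate_with_retries():
        # Illustrative stand-in for the retried deallocation step: pretend it
        # succeeds on the third call. Returning normally means "run again at
        # the next interval".
        attempts['count'] += 1
        if attempts['count'] >= 3:
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    # start() schedules the callable every `interval` seconds and returns an
    # event; wait() blocks the caller until LoopingCallDone is raised and then
    # returns its retvalue.
    result = timer.start(interval=1.0).wait()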
[ 574.425384] env[62204]: DEBUG nova.policy [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9b6485d1a706401e8c7d21bce1a13579', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd69ddf730fff40a98f43a3b13dd30e7c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 574.520911] env[62204]: DEBUG nova.network.neutron [-] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 574.528100] env[62204]: INFO nova.scheduler.client.report [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Deleted allocations for instance d7f15c60-04ac-429e-b16f-8774f9a050b8 [ 574.801884] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Successfully created port: 152bcbb3-9c6f-45c3-96f1-a1ce974a378f {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 574.867195] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 574.873099] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55f84624-2797-47d0-b947-39c9c064dde4 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.873099] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.914s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.895335] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.024513] env[62204]: INFO nova.compute.manager [-] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Took 1.02 seconds to deallocate network for instance. 
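The recurring "Acquiring lock ... by ..." / "acquired ... waited" / "released ... held" DEBUG lines (for "compute_resources", the per-instance "refresh_cache-<uuid>" locks, and so on) are emitted by oslo.concurrency's lockutils, via the synchronized wrapper ("inner") and the lock() context manager shown in the logged paths. A minimal usage sketch follows; the lock name matches the log, but the decorated functions and their bodies are illustrative, not Nova's actual call sites.

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on the named in-process lock and logs
    # the "acquired ... waited" / "released ... held" DEBUG messages seen above.
    @lockutils.synchronized('compute_resources')
    def instance_claim(tracker, instance):
        return tracker.claim(instance)        # illustrative placeholder

    # Equivalent context-manager form on the same named lock.
    def update_usage(tracker):
        with lockutils.lock('compute_resources'):
            tracker.recalculate()             # illustrative placeholder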
[ 575.027342] env[62204]: DEBUG nova.compute.claims [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 575.027540] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.043332] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4bb04729-4b88-44d9-a4dd-f56a6a7ec7b8 tempest-TenantUsagesTestJSON-796146552 tempest-TenantUsagesTestJSON-796146552-project-member] Lock "d7f15c60-04ac-429e-b16f-8774f9a050b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.228s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 575.546446] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 575.798016] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee74632f-7324-41c0-ba5d-e8b1689fdb54 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.806781] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb51ba58-70a5-49fb-99ed-35e24779fe56 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.842300] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de31c149-470a-4c98-88dd-01f589dbc6e3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.854445] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc651d54-a5e6-4cb5-84b4-c00fb0993ec8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.874615] env[62204]: DEBUG nova.compute.provider_tree [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 575.881689] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 575.911586] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 575.911866] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 575.912029] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 575.912215] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 575.912355] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 575.912498] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 575.912698] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 575.912848] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
575.913596] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 575.913803] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 575.913978] env[62204]: DEBUG nova.virt.hardware [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 575.915341] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fb351f-e3d1-4d26-8cbe-765262cc692d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.924385] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5764c418-9a89-49e3-82d3-3ea40a45a350 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.028729] env[62204]: DEBUG nova.compute.manager [req-82fb9646-0f6d-4f75-a409-9ac758becb51 req-ca03977b-4e07-42bf-aa5d-0de0c75a3747 service nova] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Received event network-changed-152bcbb3-9c6f-45c3-96f1-a1ce974a378f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 576.028924] env[62204]: DEBUG nova.compute.manager [req-82fb9646-0f6d-4f75-a409-9ac758becb51 req-ca03977b-4e07-42bf-aa5d-0de0c75a3747 service nova] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Refreshing instance network info cache due to event network-changed-152bcbb3-9c6f-45c3-96f1-a1ce974a378f. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 576.029150] env[62204]: DEBUG oslo_concurrency.lockutils [req-82fb9646-0f6d-4f75-a409-9ac758becb51 req-ca03977b-4e07-42bf-aa5d-0de0c75a3747 service nova] Acquiring lock "refresh_cache-cab990d6-c8e5-49ce-8274-9c59904193ed" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.029289] env[62204]: DEBUG oslo_concurrency.lockutils [req-82fb9646-0f6d-4f75-a409-9ac758becb51 req-ca03977b-4e07-42bf-aa5d-0de0c75a3747 service nova] Acquired lock "refresh_cache-cab990d6-c8e5-49ce-8274-9c59904193ed" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.029446] env[62204]: DEBUG nova.network.neutron [req-82fb9646-0f6d-4f75-a409-9ac758becb51 req-ca03977b-4e07-42bf-aa5d-0de0c75a3747 service nova] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Refreshing network info cache for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 576.071122] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 576.209770] env[62204]: ERROR nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f, please check neutron logs for more information. 
[ 576.209770] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 576.209770] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.209770] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 576.209770] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.209770] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 576.209770] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 576.209770] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 576.209770] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.209770] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 576.209770] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.209770] env[62204]: ERROR nova.compute.manager raise self.value [ 576.209770] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.209770] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 576.209770] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.209770] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 576.210365] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.210365] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 576.210365] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f, please check neutron logs for more information. 
[ 576.210365] env[62204]: ERROR nova.compute.manager [ 576.210365] env[62204]: Traceback (most recent call last): [ 576.210365] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 576.210365] env[62204]: listener.cb(fileno) [ 576.210365] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.210365] env[62204]: result = function(*args, **kwargs) [ 576.210365] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 576.210365] env[62204]: return func(*args, **kwargs) [ 576.210365] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.210365] env[62204]: raise e [ 576.210365] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.210365] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 576.210365] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.210365] env[62204]: created_port_ids = self._update_ports_for_instance( [ 576.210365] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 576.210365] env[62204]: with excutils.save_and_reraise_exception(): [ 576.210365] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.210365] env[62204]: self.force_reraise() [ 576.210365] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.210365] env[62204]: raise self.value [ 576.210365] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.210365] env[62204]: updated_port = self._update_port( [ 576.210365] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.210365] env[62204]: _ensure_no_port_binding_failure(port) [ 576.210365] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.210365] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 576.212559] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f, please check neutron logs for more information. [ 576.212559] env[62204]: Removing descriptor: 14 [ 576.212559] env[62204]: ERROR nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f, please check neutron logs for more information. 
[ 576.212559] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Traceback (most recent call last): [ 576.212559] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 576.212559] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] yield resources [ 576.212559] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 576.212559] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self.driver.spawn(context, instance, image_meta, [ 576.212559] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 576.212559] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 576.212559] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 576.212559] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] vm_ref = self.build_virtual_machine(instance, [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] for vif in network_info: [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] return self._sync_wrapper(fn, *args, **kwargs) [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self.wait() [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self[:] = self._gt.wait() [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] return self._exit_event.wait() [ 576.212965] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 576.213438] env[62204]: ERROR 
nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] result = hub.switch() [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] return self.greenlet.switch() [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] result = function(*args, **kwargs) [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] return func(*args, **kwargs) [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] raise e [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] nwinfo = self.network_api.allocate_for_instance( [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.213438] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] created_port_ids = self._update_ports_for_instance( [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] with excutils.save_and_reraise_exception(): [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self.force_reraise() [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] raise self.value [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] updated_port = self._update_port( [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.213883] 
env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] _ensure_no_port_binding_failure(port) [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.213883] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] raise exception.PortBindingFailed(port_id=port['id']) [ 576.214284] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] nova.exception.PortBindingFailed: Binding failed for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f, please check neutron logs for more information. [ 576.214284] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] [ 576.214284] env[62204]: INFO nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Terminating instance [ 576.214821] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "refresh_cache-cab990d6-c8e5-49ce-8274-9c59904193ed" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.379364] env[62204]: DEBUG nova.scheduler.client.report [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 576.553612] env[62204]: DEBUG nova.network.neutron [req-82fb9646-0f6d-4f75-a409-9ac758becb51 req-ca03977b-4e07-42bf-aa5d-0de0c75a3747 service nova] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 576.608656] env[62204]: DEBUG nova.network.neutron [req-82fb9646-0f6d-4f75-a409-9ac758becb51 req-ca03977b-4e07-42bf-aa5d-0de0c75a3747 service nova] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 576.888133] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.015s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 576.888133] env[62204]: ERROR nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port aee748ca-32b6-4422-8048-690ec8cdbced, please check neutron logs for more information. [ 576.888133] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Traceback (most recent call last): [ 576.888133] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 576.888133] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self.driver.spawn(context, instance, image_meta, [ 576.888133] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 576.888133] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 576.888133] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 576.888133] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] vm_ref = self.build_virtual_machine(instance, [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] vif_infos = vmwarevif.get_vif_info(self._session, [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] for vif in network_info: [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] return self._sync_wrapper(fn, *args, **kwargs) [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/model.py", line 603, in 
_sync_wrapper [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self.wait() [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self[:] = self._gt.wait() [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] return self._exit_event.wait() [ 576.888474] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] result = hub.switch() [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] return self.greenlet.switch() [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] result = function(*args, **kwargs) [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] return func(*args, **kwargs) [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] raise e [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] nwinfo = self.network_api.allocate_for_instance( [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 576.888939] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] created_port_ids = self._update_ports_for_instance( [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] with excutils.save_and_reraise_exception(): [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
227, in __exit__ [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] self.force_reraise() [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] raise self.value [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] updated_port = self._update_port( [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] _ensure_no_port_binding_failure(port) [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 576.889420] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] raise exception.PortBindingFailed(port_id=port['id']) [ 576.889857] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] nova.exception.PortBindingFailed: Binding failed for port aee748ca-32b6-4422-8048-690ec8cdbced, please check neutron logs for more information. [ 576.889857] env[62204]: ERROR nova.compute.manager [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] [ 576.889857] env[62204]: DEBUG nova.compute.utils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Binding failed for port aee748ca-32b6-4422-8048-690ec8cdbced, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 576.890557] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.691s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 576.892042] env[62204]: INFO nova.compute.claims [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 576.894805] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Build of instance 6e999bd0-38be-42d5-b2b7-3f9196fb941e was re-scheduled: Binding failed for port aee748ca-32b6-4422-8048-690ec8cdbced, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 576.895219] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 576.895442] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Acquiring lock "refresh_cache-6e999bd0-38be-42d5-b2b7-3f9196fb941e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 576.895587] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Acquired lock "refresh_cache-6e999bd0-38be-42d5-b2b7-3f9196fb941e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.895786] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 577.111780] env[62204]: DEBUG oslo_concurrency.lockutils [req-82fb9646-0f6d-4f75-a409-9ac758becb51 req-ca03977b-4e07-42bf-aa5d-0de0c75a3747 service nova] Releasing lock "refresh_cache-cab990d6-c8e5-49ce-8274-9c59904193ed" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 577.112818] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquired lock "refresh_cache-cab990d6-c8e5-49ce-8274-9c59904193ed" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 577.114131] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 577.420527] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.595663] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.640218] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 577.729176] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.089872] env[62204]: DEBUG nova.compute.manager [req-55dd7c31-fa77-48d1-8d26-586be241eaa2 req-40223737-4167-4a18-b018-235dff3dfafa service nova] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Received event network-vif-deleted-152bcbb3-9c6f-45c3-96f1-a1ce974a378f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 578.100712] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Releasing lock "refresh_cache-6e999bd0-38be-42d5-b2b7-3f9196fb941e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.101150] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 578.101150] env[62204]: DEBUG nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 578.101371] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 578.232350] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Releasing lock "refresh_cache-cab990d6-c8e5-49ce-8274-9c59904193ed" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 578.232833] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 578.232990] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 578.233300] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82b543ab-704b-43b2-8e28-a52c9f8ffb96 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.245595] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff85174a-0eeb-4832-92bb-cde6d3e306c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.268316] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 578.271897] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cab990d6-c8e5-49ce-8274-9c59904193ed could not be found. 
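The WARNING/DEBUG pair around this point shows the destroy path tolerating a VM that was never created: the VMware driver cannot find the instance on the backend, logs InstanceNotFound, and cleanup continues as if the destroy had succeeded ("Instance destroyed", then network deallocation below). A minimal, hypothetical sketch of that pattern — the helper callables and the InstanceNotFound class here are stand-ins, not Nova's actual code:

import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_on_hypervisor(find_vm, destroy_vm, instance_uuid):
    """Delete the backend VM if it exists; a missing VM is not an error.

    ``find_vm`` and ``destroy_vm`` are hypothetical callables standing in for
    the virt driver's lookup and teardown steps.
    """
    try:
        vm_ref = find_vm(instance_uuid)
        destroy_vm(vm_ref)
    except InstanceNotFound:
        # The build failed before a VM was created, so there is nothing to
        # tear down; warn and fall through to network deallocation.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.debug("Instance destroyed")


if __name__ == "__main__":
    def find_vm(uuid):
        raise InstanceNotFound(uuid)

    destroy_on_hypervisor(find_vm, lambda ref: None,
                          "cab990d6-c8e5-49ce-8274-9c59904193ed")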
[ 578.272083] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 578.272266] env[62204]: INFO nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Took 0.04 seconds to destroy the instance on the hypervisor. [ 578.272500] env[62204]: DEBUG oslo.service.loopingcall [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 578.273730] env[62204]: DEBUG nova.compute.manager [-] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 578.273823] env[62204]: DEBUG nova.network.neutron [-] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 578.275950] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75419f9c-b2b8-4628-bf2a-0b4d357d5468 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.284278] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc87f2c-dff3-4332-8224-ee15f2e68b28 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.316613] env[62204]: DEBUG nova.network.neutron [-] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 578.321018] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa8a3be-6a95-44ae-bc96-8b0062043b90 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.326101] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93da6239-a397-4e04-bd8f-da5252124e33 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.341229] env[62204]: DEBUG nova.compute.provider_tree [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 578.774734] env[62204]: DEBUG nova.network.neutron [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.821412] env[62204]: DEBUG nova.network.neutron [-] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 578.844252] env[62204]: DEBUG nova.scheduler.client.report [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 579.080299] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "6665383b-f5fd-4fdf-b625-86cfb0869419" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.080546] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "6665383b-f5fd-4fdf-b625-86cfb0869419" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.279856] env[62204]: INFO nova.compute.manager [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 
tempest-ServersTestManualDisk-462903361-project-member] [instance: 6e999bd0-38be-42d5-b2b7-3f9196fb941e] Took 1.18 seconds to deallocate network for instance. [ 579.328074] env[62204]: INFO nova.compute.manager [-] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Took 1.05 seconds to deallocate network for instance. [ 579.329473] env[62204]: DEBUG nova.compute.claims [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 579.329716] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.350010] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 579.350010] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 579.351927] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 28.420s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.676069] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "432115aa-8999-40fe-a0cb-31433575c912" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 579.676324] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "432115aa-8999-40fe-a0cb-31433575c912" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 579.857598] env[62204]: DEBUG nova.compute.utils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 579.863206] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 579.863389] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 579.938737] env[62204]: DEBUG nova.policy [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '038338b521164eb983aa9ef21f5e8280', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec9d5ea9fca1457f9de28a37e9a224ca', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 580.193328] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "48fe8f43-4ab9-41de-9b81-35b4438585ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.193328] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "48fe8f43-4ab9-41de-9b81-35b4438585ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.323438] env[62204]: INFO nova.scheduler.client.report [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Deleted allocations for instance 6e999bd0-38be-42d5-b2b7-3f9196fb941e [ 580.363874] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 580.472963] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d874d5e0-03bc-4ace-bb24-23b2f9557e18 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.484147] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee866cc-16eb-44d0-b8a1-66e9c6a42044 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.525937] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6d1dfd-f231-4c9e-8e13-ff987ecce9d7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.536058] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25578fd2-aa76-4a03-8f85-db927eca0d6f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.549953] env[62204]: DEBUG nova.compute.provider_tree [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.567884] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Successfully created port: b8163b6a-dcf4-4fd7-8796-1723eeda0dbd {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 580.838706] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3774cc59-6061-4b1d-8db3-0f7f43cd19e0 tempest-ServersTestManualDisk-462903361 tempest-ServersTestManualDisk-462903361-project-member] Lock "6e999bd0-38be-42d5-b2b7-3f9196fb941e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.967s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.052485] env[62204]: DEBUG nova.scheduler.client.report [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 581.343121] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 581.381850] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 581.415186] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:36:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1916984166',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-753554206',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 581.415380] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 581.415535] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 581.415716] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 581.415857] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 581.416077] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 581.416576] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 581.416576] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 581.416748] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 581.416820] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 581.416995] env[62204]: DEBUG nova.virt.hardware [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 581.417930] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ec6637-f0f1-4fee-9bc1-7092bb72600d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.427060] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb585162-5aff-48df-884b-25421fb75719 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.563021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.209s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 581.563021] env[62204]: ERROR nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ac76cc56-d0e7-4296-8645-969dbb6f08ed, please check neutron logs for more information. 
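The nova.virt.hardware DEBUG lines just above walk through CPU-topology selection for the 1-vCPU flavor: with no flavor or image limits, the only (sockets, cores, threads) combination whose product equals the vCPU count is 1:1:1, hence "Got 1 possible topologies". An illustrative sketch of that enumeration (not Nova's implementation; the limits default to the 65536 maximums seen in the log):

from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Return all (sockets, cores, threads) triples that use exactly vcpus."""
    found = []
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found


print(possible_topologies(1))   # [(1, 1, 1)] -- matches the log
print(possible_topologies(4))   # e.g. (1, 1, 4), (1, 2, 2), (4, 1, 1), ...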
[ 581.563021] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Traceback (most recent call last): [ 581.563021] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 581.563021] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self.driver.spawn(context, instance, image_meta, [ 581.563021] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 581.563021] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 581.563021] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 581.563021] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] vm_ref = self.build_virtual_machine(instance, [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] vif_infos = vmwarevif.get_vif_info(self._session, [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] for vif in network_info: [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] return self._sync_wrapper(fn, *args, **kwargs) [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self.wait() [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self[:] = self._gt.wait() [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] return self._exit_event.wait() [ 581.563680] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] result = hub.switch() [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] return self.greenlet.switch() [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] result = function(*args, **kwargs) [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] return func(*args, **kwargs) [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] raise e [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] nwinfo = self.network_api.allocate_for_instance( [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 581.564172] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] created_port_ids = self._update_ports_for_instance( [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] with excutils.save_and_reraise_exception(): [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] self.force_reraise() [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] raise self.value [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] updated_port = self._update_port( [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] _ensure_no_port_binding_failure(port) [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 581.564644] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] raise exception.PortBindingFailed(port_id=port['id']) [ 581.565083] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] nova.exception.PortBindingFailed: Binding failed for port ac76cc56-d0e7-4296-8645-969dbb6f08ed, please check neutron logs for more information. [ 581.565083] env[62204]: ERROR nova.compute.manager [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] [ 581.565083] env[62204]: DEBUG nova.compute.utils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Binding failed for port ac76cc56-d0e7-4296-8645-969dbb6f08ed, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 581.566546] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.968s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.571635] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Build of instance cb218f34-dec8-46ae-8659-6c37df5d51b4 was re-scheduled: Binding failed for port ac76cc56-d0e7-4296-8645-969dbb6f08ed, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 581.572435] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 581.572872] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Acquiring lock "refresh_cache-cb218f34-dec8-46ae-8659-6c37df5d51b4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 581.574037] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Acquired lock "refresh_cache-cb218f34-dec8-46ae-8659-6c37df5d51b4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.574037] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 581.872099] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.034321] env[62204]: DEBUG nova.compute.manager [req-7589ded4-8564-482c-bf51-9db3042e541d req-a06ec9e3-24f7-480b-85f5-9c1b2b0e9788 service nova] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Received event network-changed-b8163b6a-dcf4-4fd7-8796-1723eeda0dbd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 582.034321] env[62204]: DEBUG nova.compute.manager [req-7589ded4-8564-482c-bf51-9db3042e541d req-a06ec9e3-24f7-480b-85f5-9c1b2b0e9788 service nova] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Refreshing instance network info cache due to event network-changed-b8163b6a-dcf4-4fd7-8796-1723eeda0dbd. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 582.034321] env[62204]: DEBUG oslo_concurrency.lockutils [req-7589ded4-8564-482c-bf51-9db3042e541d req-a06ec9e3-24f7-480b-85f5-9c1b2b0e9788 service nova] Acquiring lock "refresh_cache-111c0b93-2f02-4f30-9389-0b7f9b041ee8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.034321] env[62204]: DEBUG oslo_concurrency.lockutils [req-7589ded4-8564-482c-bf51-9db3042e541d req-a06ec9e3-24f7-480b-85f5-9c1b2b0e9788 service nova] Acquired lock "refresh_cache-111c0b93-2f02-4f30-9389-0b7f9b041ee8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.035117] env[62204]: DEBUG nova.network.neutron [req-7589ded4-8564-482c-bf51-9db3042e541d req-a06ec9e3-24f7-480b-85f5-9c1b2b0e9788 service nova] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Refreshing network info cache for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 582.098315] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 582.099185] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.102054] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 582.110527] env[62204]: ERROR nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd, please check neutron logs for more information. 
[ 582.110527] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 582.110527] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 582.110527] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 582.110527] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 582.110527] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 582.110527] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 582.110527] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 582.110527] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.110527] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 582.110527] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.110527] env[62204]: ERROR nova.compute.manager raise self.value [ 582.110527] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 582.110527] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 582.110527] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.110527] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 582.111170] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 582.111170] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 582.111170] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd, please check neutron logs for more information. 
[ 582.111170] env[62204]: ERROR nova.compute.manager [ 582.111170] env[62204]: Traceback (most recent call last): [ 582.111170] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 582.111170] env[62204]: listener.cb(fileno) [ 582.111170] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 582.111170] env[62204]: result = function(*args, **kwargs) [ 582.111170] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 582.111170] env[62204]: return func(*args, **kwargs) [ 582.111170] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 582.111170] env[62204]: raise e [ 582.111170] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 582.111170] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 582.111170] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 582.111170] env[62204]: created_port_ids = self._update_ports_for_instance( [ 582.111170] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 582.111170] env[62204]: with excutils.save_and_reraise_exception(): [ 582.111170] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.111170] env[62204]: self.force_reraise() [ 582.111170] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.111170] env[62204]: raise self.value [ 582.111170] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 582.111170] env[62204]: updated_port = self._update_port( [ 582.111170] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.111170] env[62204]: _ensure_no_port_binding_failure(port) [ 582.111170] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 582.111170] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 582.112187] env[62204]: nova.exception.PortBindingFailed: Binding failed for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd, please check neutron logs for more information. [ 582.112187] env[62204]: Removing descriptor: 14 [ 582.112187] env[62204]: ERROR nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd, please check neutron logs for more information. 
[ 582.112187] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Traceback (most recent call last): [ 582.112187] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 582.112187] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] yield resources [ 582.112187] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 582.112187] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self.driver.spawn(context, instance, image_meta, [ 582.112187] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 582.112187] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 582.112187] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 582.112187] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] vm_ref = self.build_virtual_machine(instance, [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] vif_infos = vmwarevif.get_vif_info(self._session, [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] for vif in network_info: [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] return self._sync_wrapper(fn, *args, **kwargs) [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self.wait() [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self[:] = self._gt.wait() [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] return self._exit_event.wait() [ 582.112647] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 582.113243] env[62204]: ERROR 
nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] result = hub.switch() [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] return self.greenlet.switch() [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] result = function(*args, **kwargs) [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] return func(*args, **kwargs) [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] raise e [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] nwinfo = self.network_api.allocate_for_instance( [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 582.113243] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] created_port_ids = self._update_ports_for_instance( [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] with excutils.save_and_reraise_exception(): [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self.force_reraise() [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] raise self.value [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] updated_port = self._update_port( [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.113771] 
env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] _ensure_no_port_binding_failure(port) [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 582.113771] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] raise exception.PortBindingFailed(port_id=port['id']) [ 582.114617] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] nova.exception.PortBindingFailed: Binding failed for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd, please check neutron logs for more information. [ 582.114617] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] [ 582.114617] env[62204]: INFO nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Terminating instance [ 582.115644] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Acquiring lock "refresh_cache-111c0b93-2f02-4f30-9389-0b7f9b041ee8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.205038] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.545649] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93aa4a71-a793-425c-a715-86be65cc4552 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.553028] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4f384b-62bb-4ec7-abcd-1ddf5ed2c51a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.584553] env[62204]: DEBUG nova.network.neutron [req-7589ded4-8564-482c-bf51-9db3042e541d req-a06ec9e3-24f7-480b-85f5-9c1b2b0e9788 service nova] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 582.586855] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73651be-842a-439a-a11e-2e692d2e05e9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.593993] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a29d8b-9450-4783-9178-97956ab5a0c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.606860] env[62204]: DEBUG nova.compute.provider_tree [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.706176] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Releasing lock "refresh_cache-cb218f34-dec8-46ae-8659-6c37df5d51b4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 582.707200] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 582.707200] env[62204]: DEBUG nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 582.707200] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 582.715009] env[62204]: DEBUG nova.network.neutron [req-7589ded4-8564-482c-bf51-9db3042e541d req-a06ec9e3-24f7-480b-85f5-9c1b2b0e9788 service nova] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.721141] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 583.109758] env[62204]: DEBUG nova.scheduler.client.report [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 583.217672] env[62204]: DEBUG oslo_concurrency.lockutils [req-7589ded4-8564-482c-bf51-9db3042e541d req-a06ec9e3-24f7-480b-85f5-9c1b2b0e9788 service nova] Releasing lock "refresh_cache-111c0b93-2f02-4f30-9389-0b7f9b041ee8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.217993] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Acquired lock "refresh_cache-111c0b93-2f02-4f30-9389-0b7f9b041ee8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.218201] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 583.223148] env[62204]: DEBUG nova.network.neutron [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.615855] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.050s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 583.616520] env[62204]: ERROR nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 85984b65-22df-43df-9e1b-1e7316bb64d1, please check neutron logs for more information. 
[ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Traceback (most recent call last): [ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self.driver.spawn(context, instance, image_meta, [ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self._vmops.spawn(context, instance, image_meta, injected_files, [ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] vm_ref = self.build_virtual_machine(instance, [ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] vif_infos = vmwarevif.get_vif_info(self._session, [ 583.616520] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] for vif in network_info: [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] return self._sync_wrapper(fn, *args, **kwargs) [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self.wait() [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self[:] = self._gt.wait() [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] return self._exit_event.wait() [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] result = hub.switch() [ 583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
583.617166] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] return self.greenlet.switch() [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] result = function(*args, **kwargs) [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] return func(*args, **kwargs) [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] raise e [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] nwinfo = self.network_api.allocate_for_instance( [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] created_port_ids = self._update_ports_for_instance( [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] with excutils.save_and_reraise_exception(): [ 583.617789] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] self.force_reraise() [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] raise self.value [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] updated_port = self._update_port( [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] _ensure_no_port_binding_failure(port) [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] raise exception.PortBindingFailed(port_id=port['id']) [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] nova.exception.PortBindingFailed: Binding failed for port 85984b65-22df-43df-9e1b-1e7316bb64d1, please check neutron logs for more information. [ 583.618311] env[62204]: ERROR nova.compute.manager [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] [ 583.618690] env[62204]: DEBUG nova.compute.utils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Binding failed for port 85984b65-22df-43df-9e1b-1e7316bb64d1, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 583.618690] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 25.831s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.621714] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Build of instance c1fc621a-bc36-4d55-beec-cdc446bc8d06 was re-scheduled: Binding failed for port 85984b65-22df-43df-9e1b-1e7316bb64d1, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 583.626017] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 583.626266] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Acquiring lock "refresh_cache-c1fc621a-bc36-4d55-beec-cdc446bc8d06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 583.626415] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Acquired lock "refresh_cache-c1fc621a-bc36-4d55-beec-cdc446bc8d06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.626576] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 583.727216] env[62204]: INFO nova.compute.manager [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] [instance: cb218f34-dec8-46ae-8659-6c37df5d51b4] Took 1.02 seconds to deallocate network for instance. [ 583.755145] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 583.878213] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.061105] env[62204]: DEBUG nova.compute.manager [req-72cd3de1-2575-4049-a846-e0243e4e062b req-35f4afe8-d9f6-4f44-9f1b-1faf5bc947aa service nova] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Received event network-vif-deleted-b8163b6a-dcf4-4fd7-8796-1723eeda0dbd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 584.155274] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.321456] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.381018] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Releasing lock "refresh_cache-111c0b93-2f02-4f30-9389-0b7f9b041ee8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.381660] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 584.381734] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 584.381975] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90fe06b1-fb22-4634-86a2-2e89b65cd18c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.391653] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cf151e-16aa-4827-a628-ba7c1d81e76e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.426996] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 111c0b93-2f02-4f30-9389-0b7f9b041ee8 could not be found. [ 584.427172] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 584.427352] env[62204]: INFO nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 584.427589] env[62204]: DEBUG oslo.service.loopingcall [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 584.430370] env[62204]: DEBUG nova.compute.manager [-] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 584.430472] env[62204]: DEBUG nova.network.neutron [-] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 584.446352] env[62204]: DEBUG nova.network.neutron [-] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.612994] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49fcea5-6e06-4112-9146-6c05e3c5bfef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.620507] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169ffd68-dfa1-4d50-afd9-090902012385 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.650939] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629a95f4-5674-4adb-8168-8cc3c4e7d352 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.657965] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189c289e-92a4-441b-8fdc-a1d207a9bb9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.670954] env[62204]: DEBUG nova.compute.provider_tree [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.790882] env[62204]: INFO nova.scheduler.client.report [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Deleted allocations for instance cb218f34-dec8-46ae-8659-6c37df5d51b4 [ 584.823173] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Releasing lock "refresh_cache-c1fc621a-bc36-4d55-beec-cdc446bc8d06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 584.823593] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Virt driver 
does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 584.823899] env[62204]: DEBUG nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 584.824189] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 584.848755] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.949176] env[62204]: DEBUG nova.network.neutron [-] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.173919] env[62204]: DEBUG nova.scheduler.client.report [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 585.305145] env[62204]: DEBUG oslo_concurrency.lockutils [None req-23a691e6-85a5-4f72-b6bb-7ece14ccdcc0 tempest-ImagesOneServerNegativeTestJSON-1329577566 tempest-ImagesOneServerNegativeTestJSON-1329577566-project-member] Lock "cb218f34-dec8-46ae-8659-6c37df5d51b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.053s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.352204] env[62204]: DEBUG nova.network.neutron [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.452990] env[62204]: INFO nova.compute.manager [-] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Took 1.02 seconds to deallocate network for instance. 
[ 585.457557] env[62204]: DEBUG nova.compute.claims [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 585.457777] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.682032] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.063s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 585.682311] env[62204]: ERROR nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2, please check neutron logs for more information. [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Traceback (most recent call last): [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self.driver.spawn(context, instance, image_meta, [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self._vmops.spawn(context, instance, image_meta, injected_files, [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] vm_ref = self.build_virtual_machine(instance, [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] vif_infos = vmwarevif.get_vif_info(self._session, [ 585.682311] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] for vif in network_info: [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 
38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] return self._sync_wrapper(fn, *args, **kwargs) [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self.wait() [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self[:] = self._gt.wait() [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] return self._exit_event.wait() [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] result = hub.switch() [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 585.682557] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] return self.greenlet.switch() [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] result = function(*args, **kwargs) [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] return func(*args, **kwargs) [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] raise e [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] nwinfo = self.network_api.allocate_for_instance( [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] created_port_ids = self._update_ports_for_instance( [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File 
"/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] with excutils.save_and_reraise_exception(): [ 585.682864] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] self.force_reraise() [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] raise self.value [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] updated_port = self._update_port( [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] _ensure_no_port_binding_failure(port) [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] raise exception.PortBindingFailed(port_id=port['id']) [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] nova.exception.PortBindingFailed: Binding failed for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2, please check neutron logs for more information. [ 585.683186] env[62204]: ERROR nova.compute.manager [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] [ 585.683457] env[62204]: DEBUG nova.compute.utils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Binding failed for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2, please check neutron logs for more information. 
{{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 585.685008] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.526s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.685189] env[62204]: DEBUG nova.objects.instance [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62204) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 585.687790] env[62204]: DEBUG nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Build of instance 38104ca0-29bd-4d1e-b20c-47f76491ce32 was re-scheduled: Binding failed for port f20ea71b-20cd-48be-baa2-6fbcc4c5fcf2, please check neutron logs for more information. {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 585.688826] env[62204]: DEBUG nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 585.689114] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Acquiring lock "refresh_cache-38104ca0-29bd-4d1e-b20c-47f76491ce32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 585.689310] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Acquired lock "refresh_cache-38104ca0-29bd-4d1e-b20c-47f76491ce32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.689505] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 585.809791] env[62204]: DEBUG nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 585.855970] env[62204]: INFO nova.compute.manager [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] [instance: c1fc621a-bc36-4d55-beec-cdc446bc8d06] Took 1.03 seconds to deallocate network for instance. [ 586.222804] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.344069] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.401891] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.703218] env[62204]: DEBUG oslo_concurrency.lockutils [None req-100af4d0-564b-405d-8fc1-5f6aee3ef8f9 tempest-ServersAdmin275Test-533664072 tempest-ServersAdmin275Test-533664072-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.704391] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.554s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.706656] env[62204]: INFO nova.compute.claims [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.902400] env[62204]: INFO nova.scheduler.client.report [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Deleted allocations for instance c1fc621a-bc36-4d55-beec-cdc446bc8d06 [ 586.914040] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Releasing lock "refresh_cache-38104ca0-29bd-4d1e-b20c-47f76491ce32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.914040] env[62204]: DEBUG 
nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 586.914040] env[62204]: DEBUG nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 586.914040] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 586.953378] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.419657] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2b7890d1-3257-4234-9a92-a14fc9f12432 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483 tempest-FloatingIPsAssociationNegativeTestJSON-1413685483-project-member] Lock "c1fc621a-bc36-4d55-beec-cdc446bc8d06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.631s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 587.456534] env[62204]: DEBUG nova.network.neutron [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.925657] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 587.959299] env[62204]: INFO nova.compute.manager [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] [instance: 38104ca0-29bd-4d1e-b20c-47f76491ce32] Took 1.05 seconds to deallocate network for instance. 
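The 'Lock "compute_resources" acquired ... waited 26.526s' / '"released" ... held 1.018s' pairs above (and the matching refresh_cache-<uuid> acquire/release lines) come from oslo.concurrency's lockutils wrapper, which the log itself cites (inner, lockutils.py:407/421). A minimal sketch of that pattern follows; it assumes nothing about Nova's actual ResourceTracker beyond the lock name copied from the log, and the function body is purely illustrative.

import time

from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"  # lock name as it appears in the log


@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def instance_claim(instance_uuid):
    # Runs with the named internal lock held; concurrent callers queue up,
    # which is what produces the long "waited N.NNNs" figures above when
    # many builds, claims and evacuations contend on the same compute host.
    time.sleep(0.1)
    return instance_uuid


instance_claim("38104ca0-29bd-4d1e-b20c-47f76491ce32")

The decorator logs the acquire ("waited") and release ("held") lines at DEBUG, so a long wait such as the 26.526s above points at whatever the previous holder was doing while it held the lock, not at the lock machinery itself.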
[ 588.012600] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquiring lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.013038] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.191914] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2d10eb-0ae2-4f3a-b435-d027ffa1eaf9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.201281] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5760d65f-5c69-4ab7-93c4-83c9f543da94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.236323] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff09a5c-bb30-43ad-92b5-b58e74b2ca80 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.244361] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637d28d0-005f-4391-a5e1-f382a0da8838 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.261058] env[62204]: DEBUG nova.compute.provider_tree [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.455480] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 588.764502] env[62204]: DEBUG nova.scheduler.client.report [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 589.008104] env[62204]: INFO nova.scheduler.client.report [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Deleted allocations for instance 38104ca0-29bd-4d1e-b20c-47f76491ce32 [ 589.272845] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.566s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.272845] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 589.280635] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.945s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.280635] env[62204]: DEBUG nova.objects.instance [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lazy-loading 'resources' on Instance uuid 4644dfab-0758-43e6-bbcc-9930f086a4e5 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 589.519672] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3d601c36-3cfa-4650-b6c8-6d620a5d5aa3 tempest-VolumesAssistedSnapshotsTest-1471861306 tempest-VolumesAssistedSnapshotsTest-1471861306-project-member] Lock "38104ca0-29bd-4d1e-b20c-47f76491ce32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.779s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.784096] env[62204]: DEBUG nova.compute.utils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.790296] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 589.790296] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 589.852581] env[62204]: DEBUG nova.policy [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53218d968e3740239272e554f5c287ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45a701446fa944a7968ca5c6e28e8205', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 590.024316] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 590.197717] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864f69a8-4f5d-4e87-b1a3-9c7a435dbe58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.206118] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d70de20-60f8-4ed0-82fa-e4307af44ee9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.238946] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e497d92-a9d8-4749-971c-bf2bbafc7c7d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.246294] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5d2e0f-e047-4f24-b3d7-449f9d1ff66c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.260202] env[62204]: DEBUG nova.compute.provider_tree [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.290319] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 590.352942] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Successfully created port: 8cc4d39e-2876-442f-9ef0-772587ec25a5 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 590.551993] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.764877] env[62204]: DEBUG nova.scheduler.client.report [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 590.800429] env[62204]: INFO nova.virt.block_device [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Booting with volume f6a6bf7a-5697-4f65-bead-76ae912af185 at /dev/sda [ 590.855753] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9fa02de5-2ab8-4b0c-86cf-8c065bd0d3aa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.867790] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74d3df1-4b31-45c7-aa02-3390b6dbfbbf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.894220] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7579de1-dd7e-4266-a169-0411be39d325 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.902405] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8899b87-338f-4cf7-b04a-b9aac6659006 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.929213] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b23104-3d46-4524-b600-8fda471b85c8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.937776] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d981dc-321e-440e-a8cb-8e5f55c7a89f {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.951972] env[62204]: DEBUG nova.virt.block_device [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Updating existing volume attachment record: 0aa2db24-154b-4767-af3a-139d02950e54 {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 591.271348] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.995s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.277022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.741s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.277022] env[62204]: INFO nova.compute.claims [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 591.310852] env[62204]: INFO nova.scheduler.client.report [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Deleted allocations for instance 4644dfab-0758-43e6-bbcc-9930f086a4e5 [ 591.423882] env[62204]: DEBUG nova.compute.manager [req-e98e82be-d7d9-43e3-ac54-ac3a7df5d2bc req-57407c70-eb7b-4a8f-af5d-d385fe4fdd2a service nova] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Received event network-changed-8cc4d39e-2876-442f-9ef0-772587ec25a5 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 591.424090] env[62204]: DEBUG nova.compute.manager [req-e98e82be-d7d9-43e3-ac54-ac3a7df5d2bc req-57407c70-eb7b-4a8f-af5d-d385fe4fdd2a service nova] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Refreshing instance network info cache due to event network-changed-8cc4d39e-2876-442f-9ef0-772587ec25a5. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 591.424305] env[62204]: DEBUG oslo_concurrency.lockutils [req-e98e82be-d7d9-43e3-ac54-ac3a7df5d2bc req-57407c70-eb7b-4a8f-af5d-d385fe4fdd2a service nova] Acquiring lock "refresh_cache-6c675e27-0de4-46bc-8017-5ee43e2efa5c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 591.424442] env[62204]: DEBUG oslo_concurrency.lockutils [req-e98e82be-d7d9-43e3-ac54-ac3a7df5d2bc req-57407c70-eb7b-4a8f-af5d-d385fe4fdd2a service nova] Acquired lock "refresh_cache-6c675e27-0de4-46bc-8017-5ee43e2efa5c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.424826] env[62204]: DEBUG nova.network.neutron [req-e98e82be-d7d9-43e3-ac54-ac3a7df5d2bc req-57407c70-eb7b-4a8f-af5d-d385fe4fdd2a service nova] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Refreshing network info cache for port 8cc4d39e-2876-442f-9ef0-772587ec25a5 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 591.650723] env[62204]: ERROR nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8cc4d39e-2876-442f-9ef0-772587ec25a5, please check neutron logs for more information. [ 591.650723] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 591.650723] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 591.650723] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 591.650723] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 591.650723] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 591.650723] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 591.650723] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 591.650723] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 591.650723] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 591.650723] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 591.650723] env[62204]: ERROR nova.compute.manager raise self.value [ 591.650723] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 591.650723] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 591.650723] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 591.650723] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 591.651281] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 591.651281] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 591.651281] env[62204]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 8cc4d39e-2876-442f-9ef0-772587ec25a5, please check neutron logs for more information. [ 591.651281] env[62204]: ERROR nova.compute.manager [ 591.651281] env[62204]: Traceback (most recent call last): [ 591.651281] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 591.651281] env[62204]: listener.cb(fileno) [ 591.651281] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 591.651281] env[62204]: result = function(*args, **kwargs) [ 591.651281] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 591.651281] env[62204]: return func(*args, **kwargs) [ 591.651281] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 591.651281] env[62204]: raise e [ 591.651281] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 591.651281] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 591.651281] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 591.651281] env[62204]: created_port_ids = self._update_ports_for_instance( [ 591.651281] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 591.651281] env[62204]: with excutils.save_and_reraise_exception(): [ 591.651281] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 591.651281] env[62204]: self.force_reraise() [ 591.651281] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 591.651281] env[62204]: raise self.value [ 591.651281] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 591.651281] env[62204]: updated_port = self._update_port( [ 591.651281] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 591.651281] env[62204]: _ensure_no_port_binding_failure(port) [ 591.651281] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 591.651281] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 591.651892] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 8cc4d39e-2876-442f-9ef0-772587ec25a5, please check neutron logs for more information. [ 591.651892] env[62204]: Removing descriptor: 14 [ 591.818240] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8d8b7b-46ef-43f9-856c-d3b0a0da4033 tempest-ServersAdmin275Test-923225313 tempest-ServersAdmin275Test-923225313-project-member] Lock "4644dfab-0758-43e6-bbcc-9930f086a4e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.269s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.960819] env[62204]: DEBUG nova.network.neutron [req-e98e82be-d7d9-43e3-ac54-ac3a7df5d2bc req-57407c70-eb7b-4a8f-af5d-d385fe4fdd2a service nova] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 592.212342] env[62204]: DEBUG nova.network.neutron [req-e98e82be-d7d9-43e3-ac54-ac3a7df5d2bc req-57407c70-eb7b-4a8f-af5d-d385fe4fdd2a service nova] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 592.717988] env[62204]: DEBUG oslo_concurrency.lockutils [req-e98e82be-d7d9-43e3-ac54-ac3a7df5d2bc req-57407c70-eb7b-4a8f-af5d-d385fe4fdd2a service nova] Releasing lock "refresh_cache-6c675e27-0de4-46bc-8017-5ee43e2efa5c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 592.770723] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01f16b9-3451-4738-88df-e4d9ef067ee0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.784272] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31651b5e-d61c-4e6f-93d7-40b8a78936dc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.827226] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b821eb8-e1a8-4e73-ad46-4e8749830eb5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.834298] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a26609-6752-4cc9-a7d8-3318e1a106a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.849419] env[62204]: DEBUG nova.compute.provider_tree [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.102291] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 593.102847] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 593.103058] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 593.103212] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 593.103388] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 593.103527] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 593.103698] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 593.104071] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 593.105509] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 593.105777] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Got 1 possible 
topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 593.105974] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 593.106183] env[62204]: DEBUG nova.virt.hardware [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 593.107092] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcac6db-1f45-47fe-b605-bc23c91a1d6e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.116938] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c11f657-10d6-45a3-b0a0-96fa6f5f837e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.135083] env[62204]: ERROR nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8cc4d39e-2876-442f-9ef0-772587ec25a5, please check neutron logs for more information. 
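Both tracebacks above bottom out in _ensure_no_port_binding_failure raising PortBindingFailed. The following self-contained sketch shows that style of check; the exception class here is a stand-in rather than nova.exception, and 'binding_failed' is assumed to be the binding:vif_type value Neutron reports when no mechanism driver could bind the port.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron accepted the port create/update, but the binding on the
    # target host failed, so the compute build cannot proceed.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


port = {'id': '8cc4d39e-2876-442f-9ef0-772587ec25a5',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 8cc4d39e-..., please check neutron logs ...

When this fires, Nova either re-schedules the build (as with instance 38104ca0 above) or aborts the spawn and terminates the instance (as with 6c675e27 below); the root cause has to be chased in the Neutron server and agent logs for the named port.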
[ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Traceback (most recent call last): [ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] yield resources [ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self.driver.spawn(context, instance, image_meta, [ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] vm_ref = self.build_virtual_machine(instance, [ 593.135083] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] for vif in network_info: [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] return self._sync_wrapper(fn, *args, **kwargs) [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self.wait() [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self[:] = self._gt.wait() [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] return self._exit_event.wait() [ 593.135385] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 593.135385] env[62204]: ERROR 
nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] current.throw(*self._exc) [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] result = function(*args, **kwargs) [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] return func(*args, **kwargs) [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] raise e [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] nwinfo = self.network_api.allocate_for_instance( [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] created_port_ids = self._update_ports_for_instance( [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] with excutils.save_and_reraise_exception(): [ 593.135676] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self.force_reraise() [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] raise self.value [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] updated_port = self._update_port( [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] _ensure_no_port_binding_failure(port) [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] raise exception.PortBindingFailed(port_id=port['id']) [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] nova.exception.PortBindingFailed: Binding failed for port 8cc4d39e-2876-442f-9ef0-772587ec25a5, please check neutron logs for more information. [ 593.135966] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] [ 593.136255] env[62204]: INFO nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Terminating instance [ 593.140022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Acquiring lock "refresh_cache-6c675e27-0de4-46bc-8017-5ee43e2efa5c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.140280] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Acquired lock "refresh_cache-6c675e27-0de4-46bc-8017-5ee43e2efa5c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.140521] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 593.353064] env[62204]: DEBUG nova.scheduler.client.report [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 593.511145] env[62204]: DEBUG nova.compute.manager [req-fb068266-6b37-4b3c-b1dd-34ba47ec0e52 req-753ad85a-9df6-48e7-854f-1e396cbb4a10 service nova] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Received event network-vif-deleted-8cc4d39e-2876-442f-9ef0-772587ec25a5 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 593.668179] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.810580] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.863484] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.864114] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 593.870045] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.975s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.870136] env[62204]: DEBUG nova.objects.instance [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lazy-loading 'resources' on Instance uuid 6c63cc36-4f25-4196-9e74-50dcbefd37a2 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 594.313719] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Releasing lock "refresh_cache-6c675e27-0de4-46bc-8017-5ee43e2efa5c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.315347] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 594.315710] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d27e1056-7305-4eaa-802a-81af1d7f507c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.328370] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49457c86-ba10-498f-84b4-189cff328a95 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.352014] env[62204]: WARNING nova.virt.vmwareapi.driver [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 6c675e27-0de4-46bc-8017-5ee43e2efa5c could not be found. [ 594.352966] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 594.352966] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3136ffa5-2e98-48ef-a45a-d86a0db1c996 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.361618] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9765538f-07c3-433b-8faa-cd0b9e84d5a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.375416] env[62204]: DEBUG nova.compute.utils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 594.382219] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 594.382219] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 594.402140] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6c675e27-0de4-46bc-8017-5ee43e2efa5c could not be found. 
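The inventory payload repeated in the 'Inventory has not changed for provider 92e8f362-...' reports above fixes what the scheduler can place on this node. Below is a small worked example using the usual placement capacity formula, (total - reserved) * allocation_ratio, with max_unit capping any single request; the figures are copied from the log, while the helper itself is illustrative rather than the placement service's code.

INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'max_unit': 16,
             'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'max_unit': 156,
                'allocation_ratio': 1.0},
}


def capacity(inv):
    # Schedulable capacity for one resource class.
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])


for rc, inv in INVENTORY.items():
    print(rc, capacity(inv), 'schedulable,', inv['max_unit'], 'max per request')
# VCPU 192 schedulable, 16 max per request
# MEMORY_MB 196078 schedulable, 65530 max per request
# DISK_GB 400 schedulable, 156 max per request

With a 4.0 VCPU allocation ratio the 48 physical cores advertise 192 schedulable vCPUs, while max_unit=16 bounds the largest single flavor that can land on this provider.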
[ 594.402374] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 594.402549] env[62204]: INFO nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Took 0.09 seconds to destroy the instance on the hypervisor. [ 594.402782] env[62204]: DEBUG oslo.service.loopingcall [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.403035] env[62204]: DEBUG nova.compute.manager [-] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 594.403125] env[62204]: DEBUG nova.network.neutron [-] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 594.434935] env[62204]: DEBUG nova.network.neutron [-] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.519366] env[62204]: DEBUG nova.policy [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23f958211786441996120f6ed158d4b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9836cd3c64204980a8347e3fec159901', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 594.741719] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "b0180c2b-8edf-4d15-8d12-c754b73f6030" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 594.742224] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 594.879177] env[62204]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adca023-b900-416a-9f63-64faf532e447 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.886959] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 594.895612] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6b0422-9285-454e-9fb5-095f733f96f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.931343] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746ed38f-afb7-40ed-bfb3-c36232207d53 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.939741] env[62204]: DEBUG nova.network.neutron [-] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.943021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6156250b-db76-4957-9d0b-ce5c46bedf4d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.962707] env[62204]: DEBUG nova.compute.provider_tree [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.276406] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Successfully created port: 256afa13-f081-45fc-9096-bb9738af9937 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 595.441684] env[62204]: INFO nova.compute.manager [-] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Took 1.04 seconds to deallocate network for instance. 
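The "Acquiring lock ... acquired ... waited" and "released ... held" entries scattered through this run come from oslo_concurrency.lockutils, which wraps the protected call and logs those timings at DEBUG. A minimal sketch of the two usual forms, with an arbitrary lock name and hypothetical tracker helpers:

    from oslo_concurrency import lockutils

    def update_usage(tracker, instance):
        # Context-manager form: serializes the critical section and logs the
        # lock acquire/release at DEBUG, like the entries seen in this log.
        with lockutils.lock('compute_resources'):
            tracker.update(instance)          # hypothetical work under the lock

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(tracker, instance):
        # Decorator form: same lock applied to the whole function; the wrapper
        # reports how long the caller waited for and then held the lock.
        tracker.abort(instance)               # hypothetical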
[ 595.462518] env[62204]: DEBUG nova.scheduler.client.report [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 595.898664] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 595.932688] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 595.933060] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 595.933247] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 595.933437] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 595.933751] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 595.933912] 
env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 595.934169] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 595.937559] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 595.937559] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 595.937559] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 595.937559] env[62204]: DEBUG nova.virt.hardware [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 595.937559] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dc5650-a19f-49dc-a869-fca2ac5d7ae8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.944642] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c3be25-ac76-457e-bd94-092c49c13ecc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.967874] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.098s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.970395] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.943s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.991264] env[62204]: INFO nova.scheduler.client.report [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Deleted allocations for instance 6c63cc36-4f25-4196-9e74-50dcbefd37a2 [ 596.035879] env[62204]: INFO nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Took 0.59 seconds to detach 1 volumes for instance. [ 596.038173] env[62204]: DEBUG nova.compute.claims [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 596.038577] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 596.506577] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ce70453a-a1f9-4885-a40e-6cf99ca02812 tempest-ServerDiagnosticsV248Test-1203616758 tempest-ServerDiagnosticsV248Test-1203616758-project-member] Lock "6c63cc36-4f25-4196-9e74-50dcbefd37a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.351s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 596.910943] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c82c49-55eb-4060-b30e-5a529caaab85 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.920973] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3bcd9c-774d-4f01-9123-3a3b102bf8b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.964690] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64609a73-e62d-4f21-9229-b8f0a6fd7729 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.972485] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e920a3-95b6-4b76-9407-b3e5536c7662 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.986665] env[62204]: DEBUG nova.compute.provider_tree [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.309577] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 
tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquiring lock "12656a79-a836-452c-8f94-c8e142c9ec2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 597.309904] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "12656a79-a836-452c-8f94-c8e142c9ec2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 597.490715] env[62204]: DEBUG nova.scheduler.client.report [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 597.655015] env[62204]: DEBUG nova.compute.manager [req-88d91690-3f3d-468f-84c4-eaa959ef41da req-45700534-8b2d-406e-85e0-9d9afb0231a9 service nova] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Received event network-changed-256afa13-f081-45fc-9096-bb9738af9937 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 597.655015] env[62204]: DEBUG nova.compute.manager [req-88d91690-3f3d-468f-84c4-eaa959ef41da req-45700534-8b2d-406e-85e0-9d9afb0231a9 service nova] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Refreshing instance network info cache due to event network-changed-256afa13-f081-45fc-9096-bb9738af9937. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 597.655015] env[62204]: DEBUG oslo_concurrency.lockutils [req-88d91690-3f3d-468f-84c4-eaa959ef41da req-45700534-8b2d-406e-85e0-9d9afb0231a9 service nova] Acquiring lock "refresh_cache-4a2b5dbe-ed48-40b6-ba72-a06b14e31696" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 597.655015] env[62204]: DEBUG oslo_concurrency.lockutils [req-88d91690-3f3d-468f-84c4-eaa959ef41da req-45700534-8b2d-406e-85e0-9d9afb0231a9 service nova] Acquired lock "refresh_cache-4a2b5dbe-ed48-40b6-ba72-a06b14e31696" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.655015] env[62204]: DEBUG nova.network.neutron [req-88d91690-3f3d-468f-84c4-eaa959ef41da req-45700534-8b2d-406e-85e0-9d9afb0231a9 service nova] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Refreshing network info cache for port 256afa13-f081-45fc-9096-bb9738af9937 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 597.704874] env[62204]: ERROR nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 256afa13-f081-45fc-9096-bb9738af9937, please check neutron logs for more information. [ 597.704874] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 597.704874] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.704874] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 597.704874] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 597.704874] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 597.704874] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 597.704874] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 597.704874] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.704874] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 597.704874] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.704874] env[62204]: ERROR nova.compute.manager raise self.value [ 597.704874] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 597.704874] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 597.704874] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.704874] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 597.705378] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.705378] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 597.705378] env[62204]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 256afa13-f081-45fc-9096-bb9738af9937, please check neutron logs for more information. [ 597.705378] env[62204]: ERROR nova.compute.manager [ 597.705378] env[62204]: Traceback (most recent call last): [ 597.705378] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 597.705378] env[62204]: listener.cb(fileno) [ 597.705378] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.705378] env[62204]: result = function(*args, **kwargs) [ 597.705378] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 597.705378] env[62204]: return func(*args, **kwargs) [ 597.705378] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.705378] env[62204]: raise e [ 597.705378] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.705378] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 597.705378] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 597.705378] env[62204]: created_port_ids = self._update_ports_for_instance( [ 597.705378] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 597.705378] env[62204]: with excutils.save_and_reraise_exception(): [ 597.705378] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.705378] env[62204]: self.force_reraise() [ 597.705378] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.705378] env[62204]: raise self.value [ 597.705378] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 597.705378] env[62204]: updated_port = self._update_port( [ 597.705378] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.705378] env[62204]: _ensure_no_port_binding_failure(port) [ 597.705378] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.705378] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 597.706305] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 256afa13-f081-45fc-9096-bb9738af9937, please check neutron logs for more information. [ 597.706305] env[62204]: Removing descriptor: 14 [ 597.706305] env[62204]: ERROR nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 256afa13-f081-45fc-9096-bb9738af9937, please check neutron logs for more information. 
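Both copies of the traceback above pass through oslo_utils.excutils.save_and_reraise_exception() inside _update_ports_for_instance: the context manager lets the caller undo partial work (ports already created) and then re-raises the original PortBindingFailed, which is where the force_reraise() / raise self.value frames come from. A minimal sketch of the idiom, with hypothetical update_port and rollback callables:

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

    def update_ports(ports, update_port, rollback):
        # update_port and rollback are hypothetical callables.
        updated = []
        for port in ports:
            try:
                updated.append(update_port(port))
            except PortBindingFailed:
                # save_and_reraise_exception keeps the original exception,
                # runs the cleanup in the with-block body, then re-raises it
                # (the force_reraise()/raise self.value frames above).
                with excutils.save_and_reraise_exception():
                    rollback(updated)
        return updated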
[ 597.706305] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Traceback (most recent call last): [ 597.706305] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 597.706305] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] yield resources [ 597.706305] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 597.706305] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self.driver.spawn(context, instance, image_meta, [ 597.706305] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 597.706305] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self._vmops.spawn(context, instance, image_meta, injected_files, [ 597.706305] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 597.706305] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] vm_ref = self.build_virtual_machine(instance, [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] vif_infos = vmwarevif.get_vif_info(self._session, [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] for vif in network_info: [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] return self._sync_wrapper(fn, *args, **kwargs) [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self.wait() [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self[:] = self._gt.wait() [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] return self._exit_event.wait() [ 597.706640] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 597.707091] env[62204]: ERROR 
nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] result = hub.switch() [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] return self.greenlet.switch() [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] result = function(*args, **kwargs) [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] return func(*args, **kwargs) [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] raise e [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] nwinfo = self.network_api.allocate_for_instance( [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 597.707091] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] created_port_ids = self._update_ports_for_instance( [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] with excutils.save_and_reraise_exception(): [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self.force_reraise() [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] raise self.value [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] updated_port = self._update_port( [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 597.707437] 
env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] _ensure_no_port_binding_failure(port) [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 597.707437] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] raise exception.PortBindingFailed(port_id=port['id']) [ 597.707747] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] nova.exception.PortBindingFailed: Binding failed for port 256afa13-f081-45fc-9096-bb9738af9937, please check neutron logs for more information. [ 597.707747] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] [ 597.707747] env[62204]: INFO nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Terminating instance [ 597.708386] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Acquiring lock "refresh_cache-4a2b5dbe-ed48-40b6-ba72-a06b14e31696" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.410291] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.440s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.410903] env[62204]: ERROR nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48, please check neutron logs for more information. 
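As the entries that follow show, a PortBindingFailed during _build_and_run_instance is not terminal for the instance: the compute manager aborts the resource claim, cleans up networking, and re-schedules the build ("Build of instance ... was re-scheduled"). A rough, illustrative sketch of that control flow, in which every callable is a hypothetical stand-in rather than a real ComputeManager method:

    def do_build_and_run(instance, build_and_run, cleanup_networks, reschedule):
        # All callables here are hypothetical stand-ins for ComputeManager
        # internals; this only illustrates the ordering visible in the log.
        try:
            build_and_run(instance)
            return "done"
        except Exception as err:           # e.g. a PortBindingFailed from Neutron
            cleanup_networks(instance)     # the "Unplugging VIFs" / deallocate steps
            reschedule(instance, str(err)) # "Build of instance ... was re-scheduled"
            return "rescheduled"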
[ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Traceback (most recent call last): [ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self.driver.spawn(context, instance, image_meta, [ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] vm_ref = self.build_virtual_machine(instance, [ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] vif_infos = vmwarevif.get_vif_info(self._session, [ 598.410903] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] for vif in network_info: [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] return self._sync_wrapper(fn, *args, **kwargs) [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self.wait() [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self[:] = self._gt.wait() [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] return self._exit_event.wait() [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] result = hub.switch() [ 598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
598.411196] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] return self.greenlet.switch() [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] result = function(*args, **kwargs) [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] return func(*args, **kwargs) [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] raise e [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] nwinfo = self.network_api.allocate_for_instance( [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] created_port_ids = self._update_ports_for_instance( [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] with excutils.save_and_reraise_exception(): [ 598.411541] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] self.force_reraise() [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] raise self.value [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] updated_port = self._update_port( [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] _ensure_no_port_binding_failure(port) [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] raise exception.PortBindingFailed(port_id=port['id']) [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] nova.exception.PortBindingFailed: Binding failed for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48, please check neutron logs for more information. [ 598.411888] env[62204]: ERROR nova.compute.manager [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] [ 598.412228] env[62204]: DEBUG nova.compute.utils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Binding failed for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 598.415460] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Build of instance ac70a103-cb49-4cef-8069-dd0bb265633a was re-scheduled: Binding failed for port bc7b1ad7-07e2-41b7-91b5-a9c02b77dc48, please check neutron logs for more information. {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 598.415460] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 598.415460] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-ac70a103-cb49-4cef-8069-dd0bb265633a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.415688] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-ac70a103-cb49-4cef-8069-dd0bb265633a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 598.415722] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 598.416684] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.346s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.420152] env[62204]: INFO nova.compute.claims [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 
tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.446295] env[62204]: DEBUG nova.network.neutron [req-88d91690-3f3d-468f-84c4-eaa959ef41da req-45700534-8b2d-406e-85e0-9d9afb0231a9 service nova] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 598.628894] env[62204]: DEBUG nova.network.neutron [req-88d91690-3f3d-468f-84c4-eaa959ef41da req-45700534-8b2d-406e-85e0-9d9afb0231a9 service nova] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.973795] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.132311] env[62204]: DEBUG oslo_concurrency.lockutils [req-88d91690-3f3d-468f-84c4-eaa959ef41da req-45700534-8b2d-406e-85e0-9d9afb0231a9 service nova] Releasing lock "refresh_cache-4a2b5dbe-ed48-40b6-ba72-a06b14e31696" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.132311] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Acquired lock "refresh_cache-4a2b5dbe-ed48-40b6-ba72-a06b14e31696" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.132311] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 599.145193] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.648827] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-ac70a103-cb49-4cef-8069-dd0bb265633a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.648827] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 599.648827] env[62204]: DEBUG nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 599.648827] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 599.669019] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.686408] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 599.817067] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.830525] env[62204]: DEBUG nova.compute.manager [req-3cc65f64-70e2-4966-8283-9b5c0ac5cab2 req-30cdaa37-992d-47ad-9c50-a34b4914a0b5 service nova] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Received event network-vif-deleted-256afa13-f081-45fc-9096-bb9738af9937 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 599.955322] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94408fff-1ac0-4544-8b60-fa29940cc6a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.963574] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13dae5ac-4ad2-4789-8184-53ff8470a7ad {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.998815] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de65180b-face-4909-94e7-577f3374aea0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.009454] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd241568-4e1f-4214-8ee9-68f02b710ab4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.025885] env[62204]: DEBUG nova.compute.provider_tree [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 
tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.028571] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquiring lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.028781] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.189061] env[62204]: DEBUG nova.network.neutron [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 600.327278] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Releasing lock "refresh_cache-4a2b5dbe-ed48-40b6-ba72-a06b14e31696" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 600.327703] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 600.327896] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 600.328210] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32fe89c3-0fbe-4711-a0f4-eac8657e41fa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.337462] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb31091-e6cf-428a-a7e8-e81f011fd28f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.358604] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a2b5dbe-ed48-40b6-ba72-a06b14e31696 could not be found. [ 600.358819] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 600.359474] env[62204]: INFO nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Took 0.03 seconds to destroy the instance on the hypervisor. [ 600.359474] env[62204]: DEBUG oslo.service.loopingcall [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 600.359474] env[62204]: DEBUG nova.compute.manager [-] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 600.359610] env[62204]: DEBUG nova.network.neutron [-] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 600.378786] env[62204]: DEBUG nova.network.neutron [-] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 600.529549] env[62204]: DEBUG nova.scheduler.client.report [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.693950] env[62204]: INFO nova.compute.manager [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: ac70a103-cb49-4cef-8069-dd0bb265633a] Took 1.04 seconds to deallocate network for instance. [ 600.883649] env[62204]: DEBUG nova.network.neutron [-] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.035788] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 601.036391] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 601.039308] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.710s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 601.386869] env[62204]: INFO nova.compute.manager [-] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Took 1.03 seconds to deallocate network for instance. 
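The inventory dictionaries the scheduler report client keeps echoing (VCPU, MEMORY_MB, DISK_GB) follow the Placement model, where usable capacity per resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. A small helper that evaluates the figures from this log (the formula is Placement's; the helper and the printout are only for illustration):

    def capacity(inv):
        """Usable capacity for one resource class, as Placement computes it."""
        return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # VCPU -> 192, MEMORY_MB -> 196078, DISK_GB -> 400 for the data above
        print(rc, capacity(inv))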
[ 601.390514] env[62204]: DEBUG nova.compute.claims [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 601.393221] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 601.544123] env[62204]: DEBUG nova.compute.utils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.549398] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 601.549679] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.655772] env[62204]: DEBUG nova.policy [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcf1ff760d0e4da198c7f065fe7d92ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '404131889fdc4e1fb75608969994c421', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 601.740148] env[62204]: INFO nova.scheduler.client.report [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleted allocations for instance ac70a103-cb49-4cef-8069-dd0bb265633a [ 602.050024] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 602.073840] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041ce16c-2f19-4a27-b51a-1344c7232df3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.084530] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31bbc105-d8a4-423e-ac0d-b8d6a9ce0be4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.121742] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9632a28a-b568-4990-9de9-11e9a949d1cd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.130095] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96548e4-c6be-45df-aaf8-b0c01c4847fc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.146574] env[62204]: DEBUG nova.compute.provider_tree [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 602.185556] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Successfully created port: 44d69fe8-1077-4988-8ee0-a3208baaca5f {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.257658] env[62204]: DEBUG oslo_concurrency.lockutils [None req-373f1801-e61c-4a3e-a767-77b9a9508616 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "ac70a103-cb49-4cef-8069-dd0bb265633a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.199s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.649905] env[62204]: DEBUG nova.scheduler.client.report [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.763591] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 603.062185] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 603.102690] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 603.102690] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 603.102849] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.103035] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 603.103194] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.103340] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 603.103540] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 603.103777] env[62204]: DEBUG 
nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 603.103895] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 603.104082] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 603.104278] env[62204]: DEBUG nova.virt.hardware [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 603.105325] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf5d773-9022-4c70-a49d-db0bf905cd57 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.117049] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3801483a-e734-40ba-b01c-4f052461709e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.156968] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.117s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.157653] env[62204]: ERROR nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f, please check neutron logs for more information. 
[ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Traceback (most recent call last): [ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self.driver.spawn(context, instance, image_meta, [ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] vm_ref = self.build_virtual_machine(instance, [ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] vif_infos = vmwarevif.get_vif_info(self._session, [ 603.157653] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] for vif in network_info: [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] return self._sync_wrapper(fn, *args, **kwargs) [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self.wait() [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self[:] = self._gt.wait() [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] return self._exit_event.wait() [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] result = hub.switch() [ 603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
603.158173] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] return self.greenlet.switch() [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] result = function(*args, **kwargs) [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] return func(*args, **kwargs) [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] raise e [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] nwinfo = self.network_api.allocate_for_instance( [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] created_port_ids = self._update_ports_for_instance( [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] with excutils.save_and_reraise_exception(): [ 603.158812] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] self.force_reraise() [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] raise self.value [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] updated_port = self._update_port( [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] _ensure_no_port_binding_failure(port) [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] raise exception.PortBindingFailed(port_id=port['id']) [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] nova.exception.PortBindingFailed: Binding failed for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f, please check neutron logs for more information. [ 603.159345] env[62204]: ERROR nova.compute.manager [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] [ 603.160026] env[62204]: DEBUG nova.compute.utils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Binding failed for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 603.160026] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.289s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 603.161445] env[62204]: INFO nova.compute.claims [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.166768] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Build of instance cab990d6-c8e5-49ce-8274-9c59904193ed was re-scheduled: Binding failed for port 152bcbb3-9c6f-45c3-96f1-a1ce974a378f, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 603.167357] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 603.167627] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquiring lock "refresh_cache-cab990d6-c8e5-49ce-8274-9c59904193ed" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.167819] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Acquired lock "refresh_cache-cab990d6-c8e5-49ce-8274-9c59904193ed" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.168038] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 603.288326] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.698202] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.775124] env[62204]: DEBUG nova.compute.manager [req-4cfda1e5-afb8-432f-9c04-47594520ae1e req-a72c2899-bc7e-486f-895f-772ce518df38 service nova] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Received event network-changed-44d69fe8-1077-4988-8ee0-a3208baaca5f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 603.775124] env[62204]: DEBUG nova.compute.manager [req-4cfda1e5-afb8-432f-9c04-47594520ae1e req-a72c2899-bc7e-486f-895f-772ce518df38 service nova] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Refreshing instance network info cache due to event network-changed-44d69fe8-1077-4988-8ee0-a3208baaca5f. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 603.775383] env[62204]: DEBUG oslo_concurrency.lockutils [req-4cfda1e5-afb8-432f-9c04-47594520ae1e req-a72c2899-bc7e-486f-895f-772ce518df38 service nova] Acquiring lock "refresh_cache-571b574b-27f2-4e95-9309-fd3097fb4f64" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 603.775434] env[62204]: DEBUG oslo_concurrency.lockutils [req-4cfda1e5-afb8-432f-9c04-47594520ae1e req-a72c2899-bc7e-486f-895f-772ce518df38 service nova] Acquired lock "refresh_cache-571b574b-27f2-4e95-9309-fd3097fb4f64" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.775585] env[62204]: DEBUG nova.network.neutron [req-4cfda1e5-afb8-432f-9c04-47594520ae1e req-a72c2899-bc7e-486f-895f-772ce518df38 service nova] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Refreshing network info cache for port 44d69fe8-1077-4988-8ee0-a3208baaca5f {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 603.783620] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.228810] env[62204]: ERROR nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 44d69fe8-1077-4988-8ee0-a3208baaca5f, please check neutron logs for more information. 
[ 604.228810] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 604.228810] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.228810] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 604.228810] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.228810] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 604.228810] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.228810] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 604.228810] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.228810] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 604.228810] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.228810] env[62204]: ERROR nova.compute.manager raise self.value [ 604.228810] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.228810] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 604.228810] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.228810] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 604.229311] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.229311] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 604.229311] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 44d69fe8-1077-4988-8ee0-a3208baaca5f, please check neutron logs for more information. 
[ 604.229311] env[62204]: ERROR nova.compute.manager [ 604.231155] env[62204]: Traceback (most recent call last): [ 604.231155] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 604.231155] env[62204]: listener.cb(fileno) [ 604.231155] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.231155] env[62204]: result = function(*args, **kwargs) [ 604.231155] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 604.231155] env[62204]: return func(*args, **kwargs) [ 604.231155] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.231155] env[62204]: raise e [ 604.231155] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.231155] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 604.231155] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.231155] env[62204]: created_port_ids = self._update_ports_for_instance( [ 604.231155] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.231155] env[62204]: with excutils.save_and_reraise_exception(): [ 604.231155] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.231155] env[62204]: self.force_reraise() [ 604.231155] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.231155] env[62204]: raise self.value [ 604.231155] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.231155] env[62204]: updated_port = self._update_port( [ 604.231155] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.231155] env[62204]: _ensure_no_port_binding_failure(port) [ 604.231155] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.231155] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 604.231155] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 44d69fe8-1077-4988-8ee0-a3208baaca5f, please check neutron logs for more information. [ 604.231155] env[62204]: Removing descriptor: 14 [ 604.231856] env[62204]: ERROR nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 44d69fe8-1077-4988-8ee0-a3208baaca5f, please check neutron logs for more information. 
[ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Traceback (most recent call last): [ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] yield resources [ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self.driver.spawn(context, instance, image_meta, [ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self._vmops.spawn(context, instance, image_meta, injected_files, [ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] vm_ref = self.build_virtual_machine(instance, [ 604.231856] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] vif_infos = vmwarevif.get_vif_info(self._session, [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] for vif in network_info: [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] return self._sync_wrapper(fn, *args, **kwargs) [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self.wait() [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self[:] = self._gt.wait() [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] return self._exit_event.wait() [ 604.232110] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 604.232110] env[62204]: ERROR 
nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] result = hub.switch() [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] return self.greenlet.switch() [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] result = function(*args, **kwargs) [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] return func(*args, **kwargs) [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] raise e [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] nwinfo = self.network_api.allocate_for_instance( [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] created_port_ids = self._update_ports_for_instance( [ 604.232396] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] with excutils.save_and_reraise_exception(): [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self.force_reraise() [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] raise self.value [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] updated_port = self._update_port( [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.232676] 
env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] _ensure_no_port_binding_failure(port) [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] raise exception.PortBindingFailed(port_id=port['id']) [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] nova.exception.PortBindingFailed: Binding failed for port 44d69fe8-1077-4988-8ee0-a3208baaca5f, please check neutron logs for more information. [ 604.232676] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] [ 604.232971] env[62204]: INFO nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Terminating instance [ 604.236067] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Acquiring lock "refresh_cache-571b574b-27f2-4e95-9309-fd3097fb4f64" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.285652] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Releasing lock "refresh_cache-cab990d6-c8e5-49ce-8274-9c59904193ed" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.285990] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 604.286068] env[62204]: DEBUG nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 604.286201] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 604.305915] env[62204]: DEBUG nova.network.neutron [req-4cfda1e5-afb8-432f-9c04-47594520ae1e req-a72c2899-bc7e-486f-895f-772ce518df38 service nova] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.316282] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.385893] env[62204]: DEBUG nova.network.neutron [req-4cfda1e5-afb8-432f-9c04-47594520ae1e req-a72c2899-bc7e-486f-895f-772ce518df38 service nova] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.646962] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec353f9d-1b97-4c73-a445-cd0836a3b176 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.656102] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17687b44-ca45-46d8-ad0a-4adb6baf932c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.686961] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38632940-fa65-4a8b-9230-b50c870d9c2f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.694326] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9d5edb-4cea-460b-bacd-2932f5dd3de4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.708497] env[62204]: DEBUG nova.compute.provider_tree [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.819425] env[62204]: DEBUG nova.network.neutron [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.892613] env[62204]: DEBUG oslo_concurrency.lockutils [req-4cfda1e5-afb8-432f-9c04-47594520ae1e req-a72c2899-bc7e-486f-895f-772ce518df38 service nova] Releasing lock "refresh_cache-571b574b-27f2-4e95-9309-fd3097fb4f64" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.893070] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Acquired lock "refresh_cache-571b574b-27f2-4e95-9309-fd3097fb4f64" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.894645] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 
tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 605.212903] env[62204]: DEBUG nova.scheduler.client.report [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.325127] env[62204]: INFO nova.compute.manager [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] [instance: cab990d6-c8e5-49ce-8274-9c59904193ed] Took 1.04 seconds to deallocate network for instance. [ 605.423830] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.500531] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.603937] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquiring lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.604534] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.721819] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.722413] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 
tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 605.726130] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.268s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.861689] env[62204]: DEBUG nova.compute.manager [req-a358bed7-f97b-462f-834d-fa370df0213c req-02f7d258-23f6-4aaa-9340-2ac169dc959c service nova] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Received event network-vif-deleted-44d69fe8-1077-4988-8ee0-a3208baaca5f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 605.980986] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.981273] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.005276] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Releasing lock "refresh_cache-571b574b-27f2-4e95-9309-fd3097fb4f64" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.005670] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 606.005868] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 606.006202] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dff5ef1-e08a-4481-9427-235fa01b8d5f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.015815] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0da2d70-34cd-4c6a-8d57-42d81256d96d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.037672] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 571b574b-27f2-4e95-9309-fd3097fb4f64 could not be found. [ 606.037921] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 606.038129] env[62204]: INFO nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Took 0.03 seconds to destroy the instance on the hypervisor. [ 606.038382] env[62204]: DEBUG oslo.service.loopingcall [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.038586] env[62204]: DEBUG nova.compute.manager [-] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 606.038678] env[62204]: DEBUG nova.network.neutron [-] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 606.055264] env[62204]: DEBUG nova.network.neutron [-] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.230767] env[62204]: DEBUG nova.compute.utils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 606.232836] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 606.232836] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 606.300649] env[62204]: DEBUG nova.policy [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f4472d9f7b24320922d1f26be172f0b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'faf01fc0710e4ed1aaffaab23bed469d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 606.374783] env[62204]: INFO nova.scheduler.client.report [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Deleted allocations for instance cab990d6-c8e5-49ce-8274-9c59904193ed [ 606.557368] env[62204]: DEBUG nova.network.neutron [-] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.699437] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1464414d-1886-446c-a480-1611df5893e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.709866] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2f2621-689a-47f0-bc60-4ad12372ef8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.742697] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 606.749318] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7641cc06-bbcd-4d90-a251-2eb1272e22a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.754025] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608fa571-6e40-4d13-a044-07354331598e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.772528] env[62204]: DEBUG nova.compute.provider_tree [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.815495] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Successfully created port: d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.891034] env[62204]: DEBUG oslo_concurrency.lockutils [None req-79b8d144-d760-4537-b1fa-8e6d71247b16 tempest-DeleteServersAdminTestJSON-354717869 tempest-DeleteServersAdminTestJSON-354717869-project-member] Lock "cab990d6-c8e5-49ce-8274-9c59904193ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.941s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.060437] env[62204]: INFO nova.compute.manager [-] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Took 1.02 seconds to deallocate network for instance. 
[ 607.062819] env[62204]: DEBUG nova.compute.claims [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 607.063010] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.276172] env[62204]: DEBUG nova.scheduler.client.report [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 607.399314] env[62204]: DEBUG nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 607.755839] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 607.782030] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.056s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.782428] env[62204]: ERROR nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd, please check neutron logs for more information. 
[ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Traceback (most recent call last): [ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self.driver.spawn(context, instance, image_meta, [ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] vm_ref = self.build_virtual_machine(instance, [ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.782428] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] for vif in network_info: [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] return self._sync_wrapper(fn, *args, **kwargs) [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self.wait() [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self[:] = self._gt.wait() [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] return self._exit_event.wait() [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] result = hub.switch() [ 607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
607.782752] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] return self.greenlet.switch() [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] result = function(*args, **kwargs) [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] return func(*args, **kwargs) [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] raise e [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] nwinfo = self.network_api.allocate_for_instance( [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] created_port_ids = self._update_ports_for_instance( [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] with excutils.save_and_reraise_exception(): [ 607.783138] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] self.force_reraise() [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] raise self.value [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] updated_port = self._update_port( [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] _ensure_no_port_binding_failure(port) [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] raise exception.PortBindingFailed(port_id=port['id']) [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] nova.exception.PortBindingFailed: Binding failed for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd, please check neutron logs for more information. [ 607.783471] env[62204]: ERROR nova.compute.manager [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] [ 607.783785] env[62204]: DEBUG nova.compute.utils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Binding failed for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 607.785525] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.442s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.787965] env[62204]: INFO nova.compute.claims [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.791810] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Build of instance 111c0b93-2f02-4f30-9389-0b7f9b041ee8 was re-scheduled: Binding failed for port b8163b6a-dcf4-4fd7-8796-1723eeda0dbd, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 607.792335] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 607.792641] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Acquiring lock "refresh_cache-111c0b93-2f02-4f30-9389-0b7f9b041ee8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.792813] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Acquired lock "refresh_cache-111c0b93-2f02-4f30-9389-0b7f9b041ee8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.793039] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 607.799839] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 607.799839] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 607.799839] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.800045] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 
tempest-ServerActionsTestOtherA-1862396041-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 607.800045] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.800045] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 607.800045] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 607.800045] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 607.800255] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 607.800255] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 607.800255] env[62204]: DEBUG nova.virt.hardware [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.802826] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb52b8f-0978-4d4a-809c-6f837586aff7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.816734] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81338dbd-ca3e-426a-8f50-9c6943848cc7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.840122] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 607.928617] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.934116] env[62204]: DEBUG nova.compute.manager [req-429373b2-71f8-44a3-a051-df5f5ecdc368 req-84ea6b28-791c-49c2-b75b-7a26d03ed7f5 service nova] [instance: 72514005-1023-4db6-9e51-9b0855083411] Received event network-changed-d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 607.934314] env[62204]: DEBUG nova.compute.manager [req-429373b2-71f8-44a3-a051-df5f5ecdc368 req-84ea6b28-791c-49c2-b75b-7a26d03ed7f5 service nova] [instance: 72514005-1023-4db6-9e51-9b0855083411] Refreshing instance network info cache due to event network-changed-d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 607.934523] env[62204]: DEBUG oslo_concurrency.lockutils [req-429373b2-71f8-44a3-a051-df5f5ecdc368 req-84ea6b28-791c-49c2-b75b-7a26d03ed7f5 service nova] Acquiring lock "refresh_cache-72514005-1023-4db6-9e51-9b0855083411" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.934660] env[62204]: DEBUG oslo_concurrency.lockutils [req-429373b2-71f8-44a3-a051-df5f5ecdc368 req-84ea6b28-791c-49c2-b75b-7a26d03ed7f5 service nova] Acquired lock "refresh_cache-72514005-1023-4db6-9e51-9b0855083411" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.934818] env[62204]: DEBUG nova.network.neutron [req-429373b2-71f8-44a3-a051-df5f5ecdc368 req-84ea6b28-791c-49c2-b75b-7a26d03ed7f5 service nova] [instance: 72514005-1023-4db6-9e51-9b0855083411] Refreshing network info cache for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 607.974545] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.165071] env[62204]: ERROR nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9, please check neutron logs for more information. 
[ 608.165071] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 608.165071] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.165071] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 608.165071] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 608.165071] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 608.165071] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 608.165071] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 608.165071] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.165071] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 608.165071] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.165071] env[62204]: ERROR nova.compute.manager raise self.value [ 608.165071] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 608.165071] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 608.165071] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.165071] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 608.165502] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.165502] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 608.165502] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9, please check neutron logs for more information. 
[ 608.165502] env[62204]: ERROR nova.compute.manager [ 608.165502] env[62204]: Traceback (most recent call last): [ 608.165502] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 608.165502] env[62204]: listener.cb(fileno) [ 608.165502] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.165502] env[62204]: result = function(*args, **kwargs) [ 608.165502] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 608.165502] env[62204]: return func(*args, **kwargs) [ 608.165502] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.165502] env[62204]: raise e [ 608.165502] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.165502] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 608.165502] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 608.165502] env[62204]: created_port_ids = self._update_ports_for_instance( [ 608.165502] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 608.165502] env[62204]: with excutils.save_and_reraise_exception(): [ 608.165502] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.165502] env[62204]: self.force_reraise() [ 608.165502] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.165502] env[62204]: raise self.value [ 608.165502] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 608.165502] env[62204]: updated_port = self._update_port( [ 608.165502] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.165502] env[62204]: _ensure_no_port_binding_failure(port) [ 608.165502] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.165502] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 608.166424] env[62204]: nova.exception.PortBindingFailed: Binding failed for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9, please check neutron logs for more information. [ 608.166424] env[62204]: Removing descriptor: 14 [ 608.166424] env[62204]: ERROR nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9, please check neutron logs for more information. 
[ 608.166424] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] Traceback (most recent call last): [ 608.166424] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 608.166424] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] yield resources [ 608.166424] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 608.166424] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self.driver.spawn(context, instance, image_meta, [ 608.166424] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 608.166424] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self._vmops.spawn(context, instance, image_meta, injected_files, [ 608.166424] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 608.166424] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] vm_ref = self.build_virtual_machine(instance, [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] vif_infos = vmwarevif.get_vif_info(self._session, [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] for vif in network_info: [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] return self._sync_wrapper(fn, *args, **kwargs) [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self.wait() [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self[:] = self._gt.wait() [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] return self._exit_event.wait() [ 608.166773] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 608.167127] env[62204]: ERROR 
nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] result = hub.switch() [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] return self.greenlet.switch() [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] result = function(*args, **kwargs) [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] return func(*args, **kwargs) [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] raise e [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] nwinfo = self.network_api.allocate_for_instance( [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 608.167127] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] created_port_ids = self._update_ports_for_instance( [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] with excutils.save_and_reraise_exception(): [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self.force_reraise() [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] raise self.value [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] updated_port = self._update_port( [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 608.167509] 
env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] _ensure_no_port_binding_failure(port) [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 608.167509] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] raise exception.PortBindingFailed(port_id=port['id']) [ 608.167800] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] nova.exception.PortBindingFailed: Binding failed for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9, please check neutron logs for more information. [ 608.167800] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] [ 608.167800] env[62204]: INFO nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Terminating instance [ 608.170767] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Acquiring lock "refresh_cache-72514005-1023-4db6-9e51-9b0855083411" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.471601] env[62204]: DEBUG nova.network.neutron [req-429373b2-71f8-44a3-a051-df5f5ecdc368 req-84ea6b28-791c-49c2-b75b-7a26d03ed7f5 service nova] [instance: 72514005-1023-4db6-9e51-9b0855083411] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.477064] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Releasing lock "refresh_cache-111c0b93-2f02-4f30-9389-0b7f9b041ee8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.477697] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 608.477697] env[62204]: DEBUG nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 608.477697] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 608.492789] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.558958] env[62204]: DEBUG nova.network.neutron [req-429373b2-71f8-44a3-a051-df5f5ecdc368 req-84ea6b28-791c-49c2-b75b-7a26d03ed7f5 service nova] [instance: 72514005-1023-4db6-9e51-9b0855083411] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.995200] env[62204]: DEBUG nova.network.neutron [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.061305] env[62204]: DEBUG oslo_concurrency.lockutils [req-429373b2-71f8-44a3-a051-df5f5ecdc368 req-84ea6b28-791c-49c2-b75b-7a26d03ed7f5 service nova] Releasing lock "refresh_cache-72514005-1023-4db6-9e51-9b0855083411" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.061682] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Acquired lock "refresh_cache-72514005-1023-4db6-9e51-9b0855083411" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.061856] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 609.196665] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401bcd27-4f27-49aa-8a33-b7e08a9c87f8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.204679] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce80768b-4d2b-4069-8954-a46015cb2656 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.240045] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71ac64f-1a2e-4ff5-b496-4376ace62f89 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.247415] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959fc44b-9bb6-4721-803b-ce3e7354c01c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.260322] env[62204]: DEBUG nova.compute.provider_tree [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.498394] env[62204]: INFO nova.compute.manager [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] [instance: 111c0b93-2f02-4f30-9389-0b7f9b041ee8] Took 1.02 seconds to deallocate network for instance. [ 609.581352] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 609.681669] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.762809] env[62204]: DEBUG nova.scheduler.client.report [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 609.996461] env[62204]: DEBUG nova.compute.manager [req-dd80c788-6313-45f9-b16c-d82333628569 req-d2bfeef1-c8aa-4193-a697-28ac1b26527e service nova] [instance: 72514005-1023-4db6-9e51-9b0855083411] Received event network-vif-deleted-d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 610.186562] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Releasing lock "refresh_cache-72514005-1023-4db6-9e51-9b0855083411" {{(pid=62204) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.186973] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 610.187181] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 610.187476] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ccab628-d6cc-4e83-864b-9ef7ad5d1c5e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.196046] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74907f0a-9898-4038-9207-31afddb5a206 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.218142] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 72514005-1023-4db6-9e51-9b0855083411 could not be found. [ 610.218362] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 610.218542] env[62204]: INFO nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Took 0.03 seconds to destroy the instance on the hypervisor. [ 610.218779] env[62204]: DEBUG oslo.service.loopingcall [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 610.218993] env[62204]: DEBUG nova.compute.manager [-] [instance: 72514005-1023-4db6-9e51-9b0855083411] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 610.219095] env[62204]: DEBUG nova.network.neutron [-] [instance: 72514005-1023-4db6-9e51-9b0855083411] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 610.233034] env[62204]: DEBUG nova.network.neutron [-] [instance: 72514005-1023-4db6-9e51-9b0855083411] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 610.268051] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.482s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.268570] env[62204]: DEBUG nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 610.271752] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.816s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.273264] env[62204]: INFO nova.compute.claims [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.539910] env[62204]: INFO nova.scheduler.client.report [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Deleted allocations for instance 111c0b93-2f02-4f30-9389-0b7f9b041ee8 [ 610.737777] env[62204]: DEBUG nova.network.neutron [-] [instance: 72514005-1023-4db6-9e51-9b0855083411] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.775121] env[62204]: DEBUG nova.compute.utils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 610.775121] env[62204]: DEBUG nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 610.775121] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 610.849479] env[62204]: DEBUG nova.policy [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52a95b845f70425caf3b104b0e97c591', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1453f62c6bc44307bab2b0d10271ba45', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 611.048101] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c2274f9a-e2b4-4c33-8f0d-6d351e0bddb1 tempest-ServersWithSpecificFlavorTestJSON-266623840 tempest-ServersWithSpecificFlavorTestJSON-266623840-project-member] Lock "111c0b93-2f02-4f30-9389-0b7f9b041ee8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.077s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.239423] env[62204]: INFO nova.compute.manager [-] [instance: 72514005-1023-4db6-9e51-9b0855083411] Took 1.02 seconds to deallocate network for instance. [ 611.242559] env[62204]: DEBUG nova.compute.claims [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 611.242744] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 611.278584] env[62204]: DEBUG nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 611.299032] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Successfully created port: aa5a81de-d840-4752-8820-0cc5cfdb186f {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.553979] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 611.711061] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69fb5ba-a7b2-46ff-9574-af10ff948a6d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.719372] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8fbc20-710f-4d44-b86c-70d4827ca1ee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.750250] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23c8052-601b-4ea1-ab8e-947068c2fe4e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.759153] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315b56dc-2e06-469e-aba2-cef96de03256 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.771914] env[62204]: DEBUG nova.compute.provider_tree [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.091107] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.283263] env[62204]: DEBUG nova.scheduler.client.report [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 612.293277] env[62204]: DEBUG nova.compute.manager [None 
req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 612.322884] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.323250] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.323358] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.323476] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 612.323617] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.323761] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 612.324045] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.324215] env[62204]: 
DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 612.324384] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.324545] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.324717] env[62204]: DEBUG nova.virt.hardware [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.325905] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c47c3c-e9aa-49be-ac02-544a5fd55ff7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.335665] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0fb13e-89c6-4687-9745-e16dd70f0a2b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.759397] env[62204]: DEBUG nova.compute.manager [req-1729e2a7-b1f3-4a41-b319-0761ece30560 req-5d675ccf-764d-4492-a5a2-033bfdec9c65 service nova] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Received event network-changed-aa5a81de-d840-4752-8820-0cc5cfdb186f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 612.759507] env[62204]: DEBUG nova.compute.manager [req-1729e2a7-b1f3-4a41-b319-0761ece30560 req-5d675ccf-764d-4492-a5a2-033bfdec9c65 service nova] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Refreshing instance network info cache due to event network-changed-aa5a81de-d840-4752-8820-0cc5cfdb186f. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 612.759710] env[62204]: DEBUG oslo_concurrency.lockutils [req-1729e2a7-b1f3-4a41-b319-0761ece30560 req-5d675ccf-764d-4492-a5a2-033bfdec9c65 service nova] Acquiring lock "refresh_cache-47409cd0-db33-4a94-b806-1799a6f7e98f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.759874] env[62204]: DEBUG oslo_concurrency.lockutils [req-1729e2a7-b1f3-4a41-b319-0761ece30560 req-5d675ccf-764d-4492-a5a2-033bfdec9c65 service nova] Acquired lock "refresh_cache-47409cd0-db33-4a94-b806-1799a6f7e98f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.760068] env[62204]: DEBUG nova.network.neutron [req-1729e2a7-b1f3-4a41-b319-0761ece30560 req-5d675ccf-764d-4492-a5a2-033bfdec9c65 service nova] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Refreshing network info cache for port aa5a81de-d840-4752-8820-0cc5cfdb186f {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 612.790030] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.790030] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 612.792933] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.241s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.795140] env[62204]: INFO nova.compute.claims [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.900891] env[62204]: ERROR nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port aa5a81de-d840-4752-8820-0cc5cfdb186f, please check neutron logs for more information. 
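The traceback that follows shows the failure path named in the record above: allocate_for_instance() creates the port, Neutron hands it back with a failed binding, and _ensure_no_port_binding_failure() (nova/network/neutron.py:294) raises PortBindingFailed. A minimal sketch of that check, assuming only the standard Neutron port attribute binding:vif_type and its 'binding_failed' value; the helper and exception class here are simplified stand-ins, not Nova's exact code:

```python
# Illustrative sketch: how a Neutron port dict whose binding failed is turned
# into a PortBindingFailed exception, as in the traceback that follows.
# 'binding:vif_type' / 'binding_failed' are standard Neutron port fields; the
# exception class and helper are simplified stand-ins for Nova's versions.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

VIF_TYPE_BINDING_FAILED = 'binding_failed'

def ensure_no_port_binding_failure(port):
    # Neutron sets binding:vif_type to 'binding_failed' when no mechanism
    # driver could bind the port on the target host; the check raises so the
    # build is aborted and the claim rolled back, as seen in this log.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Example with the port from this log:
port = {'id': 'aa5a81de-d840-4752-8820-0cc5cfdb186f',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
```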
[ 612.900891] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 612.900891] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.900891] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 612.900891] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 612.900891] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 612.900891] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 612.900891] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 612.900891] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.900891] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 612.900891] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.900891] env[62204]: ERROR nova.compute.manager raise self.value [ 612.900891] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 612.900891] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 612.900891] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.900891] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 612.901379] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.901379] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 612.901379] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port aa5a81de-d840-4752-8820-0cc5cfdb186f, please check neutron logs for more information. 
[ 612.901379] env[62204]: ERROR nova.compute.manager [ 612.901379] env[62204]: Traceback (most recent call last): [ 612.901379] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 612.901379] env[62204]: listener.cb(fileno) [ 612.901379] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.901379] env[62204]: result = function(*args, **kwargs) [ 612.901379] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 612.901379] env[62204]: return func(*args, **kwargs) [ 612.901379] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 612.901379] env[62204]: raise e [ 612.901379] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.901379] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 612.901379] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 612.901379] env[62204]: created_port_ids = self._update_ports_for_instance( [ 612.901379] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 612.901379] env[62204]: with excutils.save_and_reraise_exception(): [ 612.901379] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.901379] env[62204]: self.force_reraise() [ 612.901379] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.901379] env[62204]: raise self.value [ 612.901379] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 612.901379] env[62204]: updated_port = self._update_port( [ 612.901379] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.901379] env[62204]: _ensure_no_port_binding_failure(port) [ 612.901379] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.901379] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 612.902128] env[62204]: nova.exception.PortBindingFailed: Binding failed for port aa5a81de-d840-4752-8820-0cc5cfdb186f, please check neutron logs for more information. [ 612.902128] env[62204]: Removing descriptor: 14 [ 612.902128] env[62204]: ERROR nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port aa5a81de-d840-4752-8820-0cc5cfdb186f, please check neutron logs for more information. 
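The second traceback (the one prefixed with the instance UUID) that follows shows why the same PortBindingFailed surfaces again inside driver.spawn(): network allocation runs in a background greenthread, and the async network_info wrapper only re-raises the stored exception when the VMware driver first iterates it (model.py __iter__ -> _sync_wrapper -> wait). A rough sketch of that deferred-failure pattern, using a concurrent.futures Future in place of Nova's eventlet greenthread and NetworkInfoAsyncWrapper; all names below are simplified stand-ins:

```python
# Rough sketch of the deferred-failure pattern visible in the traceback below:
# the port allocation fails in a background task, but the exception is only
# re-raised when the result is first consumed during spawn. Nova uses eventlet
# greenthreads; a thread-pool Future is used here purely for illustration.
from concurrent.futures import ThreadPoolExecutor

class PortBindingFailed(Exception):
    pass

def allocate_network_async(port_id):
    # Stands in for ComputeManager._allocate_network_async /
    # allocate_for_instance(); fails the way the log shows.
    raise PortBindingFailed(f"Binding failed for port {port_id}")

class NetworkInfoAsync:
    """Tiny stand-in for nova.network.model.NetworkInfoAsyncWrapper."""
    def __init__(self, future):
        self._future = future

    def __iter__(self):
        # Like _sync_wrapper(): block on the background task and re-raise any
        # stored exception -- the point where driver.spawn() sees the failure.
        return iter(self._future.result())

with ThreadPoolExecutor(max_workers=1) as pool:
    nw_info = NetworkInfoAsync(
        pool.submit(allocate_network_async,
                    'aa5a81de-d840-4752-8820-0cc5cfdb186f'))
    try:
        for vif in nw_info:          # roughly what get_vif_info() does
            pass
    except PortBindingFailed as exc:
        print("spawn aborted:", exc)
```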
[ 612.902128] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Traceback (most recent call last): [ 612.902128] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 612.902128] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] yield resources [ 612.902128] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 612.902128] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self.driver.spawn(context, instance, image_meta, [ 612.902128] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 612.902128] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.902128] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.902128] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] vm_ref = self.build_virtual_machine(instance, [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] for vif in network_info: [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] return self._sync_wrapper(fn, *args, **kwargs) [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self.wait() [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self[:] = self._gt.wait() [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] return self._exit_event.wait() [ 612.902415] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.902762] env[62204]: ERROR 
nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] result = hub.switch() [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] return self.greenlet.switch() [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] result = function(*args, **kwargs) [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] return func(*args, **kwargs) [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] raise e [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] nwinfo = self.network_api.allocate_for_instance( [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 612.902762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] created_port_ids = self._update_ports_for_instance( [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] with excutils.save_and_reraise_exception(): [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self.force_reraise() [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] raise self.value [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] updated_port = self._update_port( [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.903117] 
env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] _ensure_no_port_binding_failure(port) [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.903117] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] raise exception.PortBindingFailed(port_id=port['id']) [ 612.903381] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] nova.exception.PortBindingFailed: Binding failed for port aa5a81de-d840-4752-8820-0cc5cfdb186f, please check neutron logs for more information. [ 612.903381] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] [ 612.903381] env[62204]: INFO nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Terminating instance [ 612.904965] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Acquiring lock "refresh_cache-47409cd0-db33-4a94-b806-1799a6f7e98f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 613.289707] env[62204]: DEBUG nova.network.neutron [req-1729e2a7-b1f3-4a41-b319-0761ece30560 req-5d675ccf-764d-4492-a5a2-033bfdec9c65 service nova] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.301382] env[62204]: DEBUG nova.compute.utils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 613.309387] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 613.309387] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 613.382526] env[62204]: DEBUG nova.network.neutron [req-1729e2a7-b1f3-4a41-b319-0761ece30560 req-5d675ccf-764d-4492-a5a2-033bfdec9c65 service nova] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.429480] env[62204]: DEBUG nova.policy [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '83f18922c806425294f40068c35058a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '150ae78bb04a4676aa9d080a357986d3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 613.820468] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 613.885080] env[62204]: DEBUG oslo_concurrency.lockutils [req-1729e2a7-b1f3-4a41-b319-0761ece30560 req-5d675ccf-764d-4492-a5a2-033bfdec9c65 service nova] Releasing lock "refresh_cache-47409cd0-db33-4a94-b806-1799a6f7e98f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.888329] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Acquired lock "refresh_cache-47409cd0-db33-4a94-b806-1799a6f7e98f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.889029] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 614.233198] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Successfully created port: e16e7a81-36b8-4855-a5a7-6de05aced016 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.340828] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0dc85d-34a9-4690-80b1-b4021a75c48c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.350186] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb7f25e-ade0-4230-82be-6ca924c35292 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.386419] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f37e81c-ff6e-48e0-8bf3-acd6df33fa0b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.399048] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c9d9b0-ad85-4783-85e3-02606b96864e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.414447] env[62204]: DEBUG nova.compute.provider_tree [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.416850] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 614.635639] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.829961] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 614.860410] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:36:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='afe299ef-8b06-4863-b211-7496a07e7adf',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1895858693',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 614.861454] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 614.861855] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 614.862568] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 614.863992] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 614.864259] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 614.867299] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 614.867503] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 614.867742] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 614.867869] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 614.868058] env[62204]: DEBUG nova.virt.hardware [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 614.868909] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadd864a-21c4-45fe-a648-cf8e117fbe52 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.877429] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724e5e66-e115-463b-9924-7fd1c977075e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.917458] env[62204]: DEBUG nova.scheduler.client.report [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.938312] env[62204]: DEBUG nova.compute.manager [req-ee2e7452-67f6-407f-bbf7-974dcb7e2f95 req-5c4bcdfe-23a2-41d1-a65d-18581facb37d service nova] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Received event network-vif-deleted-aa5a81de-d840-4752-8820-0cc5cfdb186f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 615.141135] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 
tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Releasing lock "refresh_cache-47409cd0-db33-4a94-b806-1799a6f7e98f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.143909] env[62204]: DEBUG nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 615.143909] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 615.143909] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-416459ea-0838-4111-b57a-0dfa32eb39d7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.152065] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90537f5b-48f0-4594-a85e-bc1e9c0a7bb0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.176475] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 47409cd0-db33-4a94-b806-1799a6f7e98f could not be found. [ 615.178076] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.178076] env[62204]: INFO nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 615.178076] env[62204]: DEBUG oslo.service.loopingcall [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 615.178076] env[62204]: DEBUG nova.compute.manager [-] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 615.178076] env[62204]: DEBUG nova.network.neutron [-] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 615.205885] env[62204]: DEBUG nova.network.neutron [-] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.310188] env[62204]: ERROR nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e16e7a81-36b8-4855-a5a7-6de05aced016, please check neutron logs for more information. [ 615.310188] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 615.310188] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.310188] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 615.310188] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 615.310188] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 615.310188] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 615.310188] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 615.310188] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.310188] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 615.310188] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.310188] env[62204]: ERROR nova.compute.manager raise self.value [ 615.310188] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 615.310188] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 615.310188] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.310188] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 615.310746] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 615.310746] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 615.310746] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e16e7a81-36b8-4855-a5a7-6de05aced016, please check neutron logs for more information. 
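For reference, the inventory reported at 612.283263 and 614.917458 for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 implies the following schedulable capacities, assuming placement's usual capacity formula (total - reserved) * allocation_ratio; a quick back-of-the-envelope check:

```python
# Back-of-the-envelope check of the inventory reported above for provider
# 92e8f362-5134-40c6-9a5c-0b8f64197972, assuming the usual placement capacity
# formula (total - reserved) * allocation_ratio. max_unit (16 VCPU, 65530 MB,
# 156 GB) additionally caps any single allocation.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity ~ {capacity:g}")

# VCPU:      (48 - 0)       * 4.0 = 192 vCPUs
# MEMORY_MB: (196590 - 512) * 1.0 = 196078 MB
# DISK_GB:   (400 - 0)      * 1.0 = 400 GB
```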
[ 615.310746] env[62204]: ERROR nova.compute.manager [ 615.310746] env[62204]: Traceback (most recent call last): [ 615.310746] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 615.310746] env[62204]: listener.cb(fileno) [ 615.310746] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 615.310746] env[62204]: result = function(*args, **kwargs) [ 615.310746] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 615.310746] env[62204]: return func(*args, **kwargs) [ 615.310746] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 615.310746] env[62204]: raise e [ 615.310746] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.310746] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 615.310746] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 615.310746] env[62204]: created_port_ids = self._update_ports_for_instance( [ 615.310746] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 615.310746] env[62204]: with excutils.save_and_reraise_exception(): [ 615.310746] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.310746] env[62204]: self.force_reraise() [ 615.310746] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.310746] env[62204]: raise self.value [ 615.310746] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 615.310746] env[62204]: updated_port = self._update_port( [ 615.310746] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.310746] env[62204]: _ensure_no_port_binding_failure(port) [ 615.310746] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 615.310746] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 615.311463] env[62204]: nova.exception.PortBindingFailed: Binding failed for port e16e7a81-36b8-4855-a5a7-6de05aced016, please check neutron logs for more information. [ 615.311463] env[62204]: Removing descriptor: 16 [ 615.311463] env[62204]: ERROR nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e16e7a81-36b8-4855-a5a7-6de05aced016, please check neutron logs for more information. 
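The nova.virt.hardware records earlier in this run (612.322884-612.324717 and 614.860410 onward) walk the same topology selection for a 1-vCPU flavor: with no flavor or image limits the preference is 0:0:0, the maximums default to 65536, and the only factorisation of 1 vCPU is sockets=1, cores=1, threads=1. A loose illustration of that enumeration step (a simplification of the idea behind nova.virt.hardware._get_possible_cpu_topologies, not a copy of it):

```python
# Loose illustration of the "Build topologies for N vcpu(s)" step seen in the
# records above: enumerate sockets*cores*threads factorisations of the vCPU
# count that stay within the (defaulted) maximums. Simplified sketch only.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

print(possible_topologies(1))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching
#    "Got 1 possible topologies" in the log.
```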
[ 615.311463] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Traceback (most recent call last): [ 615.311463] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 615.311463] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] yield resources [ 615.311463] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 615.311463] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self.driver.spawn(context, instance, image_meta, [ 615.311463] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 615.311463] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self._vmops.spawn(context, instance, image_meta, injected_files, [ 615.311463] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 615.311463] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] vm_ref = self.build_virtual_machine(instance, [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] vif_infos = vmwarevif.get_vif_info(self._session, [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] for vif in network_info: [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] return self._sync_wrapper(fn, *args, **kwargs) [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self.wait() [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self[:] = self._gt.wait() [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] return self._exit_event.wait() [ 615.311876] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 615.312208] env[62204]: ERROR 
nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] result = hub.switch() [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] return self.greenlet.switch() [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] result = function(*args, **kwargs) [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] return func(*args, **kwargs) [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] raise e [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] nwinfo = self.network_api.allocate_for_instance( [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 615.312208] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] created_port_ids = self._update_ports_for_instance( [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] with excutils.save_and_reraise_exception(): [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self.force_reraise() [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] raise self.value [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] updated_port = self._update_port( [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 615.312537] 
env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] _ensure_no_port_binding_failure(port) [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 615.312537] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] raise exception.PortBindingFailed(port_id=port['id']) [ 615.312816] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] nova.exception.PortBindingFailed: Binding failed for port e16e7a81-36b8-4855-a5a7-6de05aced016, please check neutron logs for more information. [ 615.312816] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] [ 615.312816] env[62204]: INFO nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Terminating instance [ 615.320665] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "refresh_cache-d52bfb49-beb0-4bfe-b3bb-45132c210065" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.320665] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquired lock "refresh_cache-d52bfb49-beb0-4bfe-b3bb-45132c210065" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.320665] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 615.424770] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.632s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 615.425765] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 615.429636] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 19.391s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 615.441056] env[62204]: DEBUG nova.compute.manager [req-2c46c0dd-0c93-49c1-bb7b-75034b018131 req-d4588b33-5a38-4061-8cfa-6f61b6e05bb1 service nova] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Received event network-changed-e16e7a81-36b8-4855-a5a7-6de05aced016 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 615.445019] env[62204]: DEBUG nova.compute.manager [req-2c46c0dd-0c93-49c1-bb7b-75034b018131 req-d4588b33-5a38-4061-8cfa-6f61b6e05bb1 service nova] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Refreshing instance network info cache due to event network-changed-e16e7a81-36b8-4855-a5a7-6de05aced016. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 615.445019] env[62204]: DEBUG oslo_concurrency.lockutils [req-2c46c0dd-0c93-49c1-bb7b-75034b018131 req-d4588b33-5a38-4061-8cfa-6f61b6e05bb1 service nova] Acquiring lock "refresh_cache-d52bfb49-beb0-4bfe-b3bb-45132c210065" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.707759] env[62204]: DEBUG nova.network.neutron [-] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.852317] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.936024] env[62204]: DEBUG nova.compute.utils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 615.936481] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 615.936802] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 615.952208] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.013166] env[62204]: DEBUG nova.policy [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '902a7731e2664ccd8e880e1dd25b5598', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '749474347b5d417197e01fcca204d3d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 616.211592] env[62204]: INFO nova.compute.manager [-] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Took 1.03 seconds to deallocate network for instance. [ 616.220206] env[62204]: DEBUG nova.compute.claims [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 616.220536] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 616.444547] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 616.453782] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Releasing lock "refresh_cache-d52bfb49-beb0-4bfe-b3bb-45132c210065" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 616.454227] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 616.454419] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 616.454714] env[62204]: DEBUG oslo_concurrency.lockutils [req-2c46c0dd-0c93-49c1-bb7b-75034b018131 req-d4588b33-5a38-4061-8cfa-6f61b6e05bb1 service nova] Acquired lock "refresh_cache-d52bfb49-beb0-4bfe-b3bb-45132c210065" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.454908] env[62204]: DEBUG nova.network.neutron [req-2c46c0dd-0c93-49c1-bb7b-75034b018131 req-d4588b33-5a38-4061-8cfa-6f61b6e05bb1 service nova] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Refreshing network info cache for port e16e7a81-36b8-4855-a5a7-6de05aced016 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 616.455902] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc91a547-d14e-433f-99a0-5f8605fe3be5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.466955] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d833b7d9-fdf1-4d3d-a3af-fe18bbcd5bff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.479127] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd6a613-1580-49ec-85af-67ad63b44571 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.488574] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519430a8-7707-4f05-be90-f85c376e8564 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.496822] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d52bfb49-beb0-4bfe-b3bb-45132c210065 could not be found. [ 616.497121] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 616.497389] env[62204]: INFO nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Took 0.04 seconds to destroy the instance on the hypervisor. [ 616.497790] env[62204]: DEBUG oslo.service.loopingcall [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 616.498701] env[62204]: DEBUG nova.compute.manager [-] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 616.498870] env[62204]: DEBUG nova.network.neutron [-] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 616.527913] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31aea63-9a27-40de-8e34-49fa075f871b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.535107] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8beb030-dab9-4b64-992c-21f9bddad45c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.554887] env[62204]: DEBUG nova.compute.provider_tree [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.557080] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Successfully created port: a22305f5-bf81-44e2-8ff5-08574b1b4374 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 616.559221] env[62204]: DEBUG nova.network.neutron [-] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 616.980435] env[62204]: DEBUG nova.network.neutron [req-2c46c0dd-0c93-49c1-bb7b-75034b018131 req-d4588b33-5a38-4061-8cfa-6f61b6e05bb1 service nova] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.045990] env[62204]: DEBUG nova.network.neutron [req-2c46c0dd-0c93-49c1-bb7b-75034b018131 req-d4588b33-5a38-4061-8cfa-6f61b6e05bb1 service nova] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.064276] env[62204]: DEBUG nova.scheduler.client.report [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 617.067307] env[62204]: DEBUG nova.network.neutron [-] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.458925] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 617.484017] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 617.484017] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 617.484017] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.484256] env[62204]: DEBUG 
nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 617.484256] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.484795] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 617.484795] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 617.488106] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 617.488106] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 617.488106] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 617.488106] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.488106] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ac6c71-7ed8-4885-97e4-392990d199bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.499487] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b0ff8f-92cd-4496-8058-ab3170ee0c1e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.551213] env[62204]: DEBUG oslo_concurrency.lockutils [req-2c46c0dd-0c93-49c1-bb7b-75034b018131 req-d4588b33-5a38-4061-8cfa-6f61b6e05bb1 service nova] Releasing lock "refresh_cache-d52bfb49-beb0-4bfe-b3bb-45132c210065" {{(pid=62204) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 617.573029] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.141s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.573029] env[62204]: ERROR nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8cc4d39e-2876-442f-9ef0-772587ec25a5, please check neutron logs for more information. [ 617.573029] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Traceback (most recent call last): [ 617.573029] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 617.573029] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self.driver.spawn(context, instance, image_meta, [ 617.573029] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 617.573029] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.573029] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.573029] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] vm_ref = self.build_virtual_machine(instance, [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] for vif in network_info: [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] return self._sync_wrapper(fn, *args, **kwargs) [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self.wait() [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 617.573350] env[62204]: ERROR 
nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self[:] = self._gt.wait() [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] return self._exit_event.wait() [ 617.573350] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] current.throw(*self._exc) [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] result = function(*args, **kwargs) [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] return func(*args, **kwargs) [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] raise e [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] nwinfo = self.network_api.allocate_for_instance( [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] created_port_ids = self._update_ports_for_instance( [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.573727] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] with excutils.save_and_reraise_exception(): [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] self.force_reraise() [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] raise self.value [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] updated_port = self._update_port( [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] _ensure_no_port_binding_failure(port) [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] raise exception.PortBindingFailed(port_id=port['id']) [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] nova.exception.PortBindingFailed: Binding failed for port 8cc4d39e-2876-442f-9ef0-772587ec25a5, please check neutron logs for more information. [ 617.574069] env[62204]: ERROR nova.compute.manager [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] [ 617.574424] env[62204]: DEBUG nova.compute.utils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Binding failed for port 8cc4d39e-2876-442f-9ef0-772587ec25a5, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 617.576555] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Build of instance 6c675e27-0de4-46bc-8017-5ee43e2efa5c was re-scheduled: Binding failed for port 8cc4d39e-2876-442f-9ef0-772587ec25a5, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 617.576555] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 617.576555] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Acquiring lock "refresh_cache-6c675e27-0de4-46bc-8017-5ee43e2efa5c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.576555] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Acquired lock "refresh_cache-6c675e27-0de4-46bc-8017-5ee43e2efa5c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.577146] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 617.579685] env[62204]: INFO nova.compute.manager [-] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Took 1.08 seconds to deallocate network for instance. [ 617.579685] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.186s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.585983] env[62204]: DEBUG nova.compute.claims [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 617.585983] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.608119] env[62204]: DEBUG nova.compute.manager [req-252b030f-3223-44d7-bad9-6812dabfa8e9 req-cacc735f-e514-4c09-a00c-cdb1f282cfc8 service nova] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Received event network-vif-deleted-e16e7a81-36b8-4855-a5a7-6de05aced016 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 617.808882] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62204) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 617.809201] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.111068] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.118231] env[62204]: DEBUG nova.compute.manager [req-6cf2d0c8-0ee4-4ad5-9729-a49fbaad7a46 req-8a0f2e56-ee5f-48d5-a415-82bac0c9c116 service nova] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Received event network-changed-a22305f5-bf81-44e2-8ff5-08574b1b4374 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 618.118384] env[62204]: DEBUG nova.compute.manager [req-6cf2d0c8-0ee4-4ad5-9729-a49fbaad7a46 req-8a0f2e56-ee5f-48d5-a415-82bac0c9c116 service nova] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Refreshing instance network info cache due to event network-changed-a22305f5-bf81-44e2-8ff5-08574b1b4374. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 618.118595] env[62204]: DEBUG oslo_concurrency.lockutils [req-6cf2d0c8-0ee4-4ad5-9729-a49fbaad7a46 req-8a0f2e56-ee5f-48d5-a415-82bac0c9c116 service nova] Acquiring lock "refresh_cache-0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.118724] env[62204]: DEBUG oslo_concurrency.lockutils [req-6cf2d0c8-0ee4-4ad5-9729-a49fbaad7a46 req-8a0f2e56-ee5f-48d5-a415-82bac0c9c116 service nova] Acquired lock "refresh_cache-0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.119065] env[62204]: DEBUG nova.network.neutron [req-6cf2d0c8-0ee4-4ad5-9729-a49fbaad7a46 req-8a0f2e56-ee5f-48d5-a415-82bac0c9c116 service nova] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Refreshing network info cache for port a22305f5-bf81-44e2-8ff5-08574b1b4374 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 618.198101] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.243319] env[62204]: ERROR nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a22305f5-bf81-44e2-8ff5-08574b1b4374, please check neutron logs for more information. 
[ 618.243319] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 618.243319] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.243319] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 618.243319] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.243319] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 618.243319] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.243319] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 618.243319] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.243319] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 618.243319] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.243319] env[62204]: ERROR nova.compute.manager raise self.value [ 618.243319] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.243319] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 618.243319] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.243319] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 618.243767] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.243767] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 618.243767] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a22305f5-bf81-44e2-8ff5-08574b1b4374, please check neutron logs for more information. 
[ 618.243767] env[62204]: ERROR nova.compute.manager [ 618.243767] env[62204]: Traceback (most recent call last): [ 618.243767] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 618.243767] env[62204]: listener.cb(fileno) [ 618.243767] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.243767] env[62204]: result = function(*args, **kwargs) [ 618.243767] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 618.243767] env[62204]: return func(*args, **kwargs) [ 618.243767] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.243767] env[62204]: raise e [ 618.243767] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.243767] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 618.243767] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.243767] env[62204]: created_port_ids = self._update_ports_for_instance( [ 618.243767] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.243767] env[62204]: with excutils.save_and_reraise_exception(): [ 618.243767] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.243767] env[62204]: self.force_reraise() [ 618.243767] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.243767] env[62204]: raise self.value [ 618.243767] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.243767] env[62204]: updated_port = self._update_port( [ 618.243767] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.243767] env[62204]: _ensure_no_port_binding_failure(port) [ 618.243767] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.243767] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 618.244765] env[62204]: nova.exception.PortBindingFailed: Binding failed for port a22305f5-bf81-44e2-8ff5-08574b1b4374, please check neutron logs for more information. [ 618.244765] env[62204]: Removing descriptor: 14 [ 618.244765] env[62204]: ERROR nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a22305f5-bf81-44e2-8ff5-08574b1b4374, please check neutron logs for more information. 
[ 618.244765] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Traceback (most recent call last): [ 618.244765] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 618.244765] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] yield resources [ 618.244765] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 618.244765] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self.driver.spawn(context, instance, image_meta, [ 618.244765] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 618.244765] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 618.244765] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 618.244765] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] vm_ref = self.build_virtual_machine(instance, [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] vif_infos = vmwarevif.get_vif_info(self._session, [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] for vif in network_info: [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] return self._sync_wrapper(fn, *args, **kwargs) [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self.wait() [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self[:] = self._gt.wait() [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] return self._exit_event.wait() [ 618.245173] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 618.245482] env[62204]: ERROR 
nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] result = hub.switch() [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] return self.greenlet.switch() [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] result = function(*args, **kwargs) [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] return func(*args, **kwargs) [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] raise e [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] nwinfo = self.network_api.allocate_for_instance( [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 618.245482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] created_port_ids = self._update_ports_for_instance( [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] with excutils.save_and_reraise_exception(): [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self.force_reraise() [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] raise self.value [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] updated_port = self._update_port( [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 618.245800] 
env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] _ensure_no_port_binding_failure(port) [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 618.245800] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] raise exception.PortBindingFailed(port_id=port['id']) [ 618.246274] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] nova.exception.PortBindingFailed: Binding failed for port a22305f5-bf81-44e2-8ff5-08574b1b4374, please check neutron logs for more information. [ 618.246274] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] [ 618.246274] env[62204]: INFO nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Terminating instance [ 618.250467] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "refresh_cache-0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 618.316860] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.317438] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Starting heal instance info cache {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 618.317438] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Rebuilding the list of instances to heal {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 618.533407] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8026147-91b9-4e86-ab13-f07c89a0f89c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.540782] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883c4911-7739-497f-895d-920ce3cb1bc9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.570754] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1e8cce-0430-4f1e-8b95-382932ab0719 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.577959] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b6f19a-33ca-46cb-962c-384eb62993fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.590877] env[62204]: DEBUG nova.compute.provider_tree [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] 
Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.645570] env[62204]: DEBUG nova.network.neutron [req-6cf2d0c8-0ee4-4ad5-9729-a49fbaad7a46 req-8a0f2e56-ee5f-48d5-a415-82bac0c9c116 service nova] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.703174] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Releasing lock "refresh_cache-6c675e27-0de4-46bc-8017-5ee43e2efa5c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.703316] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 618.703809] env[62204]: DEBUG nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.704131] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 618.721472] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.751376] env[62204]: DEBUG nova.network.neutron [req-6cf2d0c8-0ee4-4ad5-9729-a49fbaad7a46 req-8a0f2e56-ee5f-48d5-a415-82bac0c9c116 service nova] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.823107] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 618.823107] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 72514005-1023-4db6-9e51-9b0855083411] Skipping network cache update for instance because it is Building. 
{{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 618.823107] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 618.823297] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 618.823297] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 618.823365] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Didn't find any instances for network info cache update. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 618.823584] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.823806] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.824094] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.824259] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.824425] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.824580] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 618.824706] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62204) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 618.824850] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.094537] env[62204]: DEBUG nova.scheduler.client.report [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.224144] env[62204]: DEBUG nova.network.neutron [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.254444] env[62204]: DEBUG oslo_concurrency.lockutils [req-6cf2d0c8-0ee4-4ad5-9729-a49fbaad7a46 req-8a0f2e56-ee5f-48d5-a415-82bac0c9c116 service nova] Releasing lock "refresh_cache-0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.254845] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "refresh_cache-0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.255042] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 619.328116] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.599543] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.020s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.600232] env[62204]: ERROR nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea 
tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 256afa13-f081-45fc-9096-bb9738af9937, please check neutron logs for more information. [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Traceback (most recent call last): [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self.driver.spawn(context, instance, image_meta, [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self._vmops.spawn(context, instance, image_meta, injected_files, [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] vm_ref = self.build_virtual_machine(instance, [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] vif_infos = vmwarevif.get_vif_info(self._session, [ 619.600232] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] for vif in network_info: [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] return self._sync_wrapper(fn, *args, **kwargs) [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self.wait() [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self[:] = self._gt.wait() [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] return self._exit_event.wait() [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 
124, in wait [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] result = hub.switch() [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 619.600625] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] return self.greenlet.switch() [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] result = function(*args, **kwargs) [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] return func(*args, **kwargs) [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] raise e [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] nwinfo = self.network_api.allocate_for_instance( [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] created_port_ids = self._update_ports_for_instance( [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] with excutils.save_and_reraise_exception(): [ 619.601205] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] self.force_reraise() [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] raise self.value [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] updated_port = self._update_port( [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] _ensure_no_port_binding_failure(port) [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] raise exception.PortBindingFailed(port_id=port['id']) [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] nova.exception.PortBindingFailed: Binding failed for port 256afa13-f081-45fc-9096-bb9738af9937, please check neutron logs for more information. [ 619.601672] env[62204]: ERROR nova.compute.manager [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] [ 619.602114] env[62204]: DEBUG nova.compute.utils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Binding failed for port 256afa13-f081-45fc-9096-bb9738af9937, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 619.605131] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.314s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.606892] env[62204]: INFO nova.compute.claims [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.610131] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Build of instance 4a2b5dbe-ed48-40b6-ba72-a06b14e31696 was re-scheduled: Binding failed for port 256afa13-f081-45fc-9096-bb9738af9937, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 619.610751] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 619.611084] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Acquiring lock "refresh_cache-4a2b5dbe-ed48-40b6-ba72-a06b14e31696" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.611311] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Acquired lock "refresh_cache-4a2b5dbe-ed48-40b6-ba72-a06b14e31696" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.611514] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 619.726765] env[62204]: INFO nova.compute.manager [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] [instance: 6c675e27-0de4-46bc-8017-5ee43e2efa5c] Took 1.02 seconds to deallocate network for instance. [ 619.772329] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 620.086375] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.140919] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 620.304274] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.591517] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "refresh_cache-0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.591517] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 620.591517] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 620.591517] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88ababd1-d8fe-41d0-8e07-71c5c59bdced {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.600717] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19377aa9-e570-4afa-b76a-3f776f643db5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.623879] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf could not be found. [ 620.624151] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.624376] env[62204]: INFO nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Took 0.03 seconds to destroy the instance on the hypervisor. [ 620.624662] env[62204]: DEBUG oslo.service.loopingcall [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 620.624884] env[62204]: DEBUG nova.compute.manager [-] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 620.624981] env[62204]: DEBUG nova.network.neutron [-] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 620.653911] env[62204]: DEBUG nova.network.neutron [-] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 620.758076] env[62204]: INFO nova.scheduler.client.report [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Deleted allocations for instance 6c675e27-0de4-46bc-8017-5ee43e2efa5c [ 620.806782] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Releasing lock "refresh_cache-4a2b5dbe-ed48-40b6-ba72-a06b14e31696" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.807023] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 620.807218] env[62204]: DEBUG nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 620.807367] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 620.836831] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 620.865085] env[62204]: DEBUG nova.compute.manager [req-9d578f66-aed9-419c-a9c8-04c87620fb1b req-56f2afff-758e-49d4-a25f-b047d58c29dc service nova] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Received event network-vif-deleted-a22305f5-bf81-44e2-8ff5-08574b1b4374 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 621.056443] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d18ca4a-70e6-4b6b-89db-528022b95e17 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.064383] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9f3558-eaaa-4db1-bc36-f063dc0567c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.097051] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ecb50e-b4c4-439c-ba37-322f81b067ee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.104290] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff68fbf8-8f4c-40b6-aaa7-62bfd4094053 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.117837] env[62204]: DEBUG nova.compute.provider_tree [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.158090] env[62204]: DEBUG nova.network.neutron [-] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.272906] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6ce97d24-d7c1-410e-ac03-070752fddf8a tempest-ServersTestBootFromVolume-524406829 tempest-ServersTestBootFromVolume-524406829-project-member] Lock "6c675e27-0de4-46bc-8017-5ee43e2efa5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 91.494s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.337489] env[62204]: DEBUG nova.network.neutron [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.621745] env[62204]: DEBUG nova.scheduler.client.report [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 621.660475] env[62204]: INFO nova.compute.manager [-] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Took 1.04 seconds to deallocate network for instance. [ 621.663677] env[62204]: DEBUG nova.compute.claims [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 621.663677] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 621.775473] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 621.839917] env[62204]: INFO nova.compute.manager [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] [instance: 4a2b5dbe-ed48-40b6-ba72-a06b14e31696] Took 1.03 seconds to deallocate network for instance. [ 622.134240] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.532s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.134816] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 622.138386] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.075s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.310765] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.328236] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Acquiring lock "69604167-6a61-4723-bf7d-7ba168837839" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.328236] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "69604167-6a61-4723-bf7d-7ba168837839" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.640425] env[62204]: DEBUG nova.compute.utils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 622.641904] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 622.642099] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 622.764283] env[62204]: DEBUG nova.policy [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '902a7731e2664ccd8e880e1dd25b5598', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '749474347b5d417197e01fcca204d3d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 622.877050] env[62204]: INFO nova.scheduler.client.report [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Deleted allocations for instance 4a2b5dbe-ed48-40b6-ba72-a06b14e31696 [ 623.059124] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57fed28-212a-42e6-a63a-54813f702e7f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.068351] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ba8602-ee4b-40d3-86be-54eb46aaf48d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.099857] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4b84d1-ff39-4c26-ad55-aa8f23cad64a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.107561] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe332bb-f384-4b7a-83f4-c3c1d5527f55 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.122032] env[62204]: DEBUG nova.compute.provider_tree [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.153050] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 623.312077] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Successfully created port: 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.390347] env[62204]: DEBUG oslo_concurrency.lockutils [None req-83251a16-8297-4388-9b71-14c75db8b5ea tempest-FloatingIPsAssociationTestJSON-1735526649 tempest-FloatingIPsAssociationTestJSON-1735526649-project-member] Lock "4a2b5dbe-ed48-40b6-ba72-a06b14e31696" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 92.047s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.625423] env[62204]: DEBUG nova.scheduler.client.report [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 623.895747] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 624.131567] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.993s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.132211] env[62204]: ERROR nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 44d69fe8-1077-4988-8ee0-a3208baaca5f, please check neutron logs for more information. 
[ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Traceback (most recent call last): [ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self.driver.spawn(context, instance, image_meta, [ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self._vmops.spawn(context, instance, image_meta, injected_files, [ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] vm_ref = self.build_virtual_machine(instance, [ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] vif_infos = vmwarevif.get_vif_info(self._session, [ 624.132211] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] for vif in network_info: [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] return self._sync_wrapper(fn, *args, **kwargs) [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self.wait() [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self[:] = self._gt.wait() [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] return self._exit_event.wait() [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] result = hub.switch() [ 624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
624.132626] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] return self.greenlet.switch() [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] result = function(*args, **kwargs) [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] return func(*args, **kwargs) [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] raise e [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] nwinfo = self.network_api.allocate_for_instance( [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] created_port_ids = self._update_ports_for_instance( [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] with excutils.save_and_reraise_exception(): [ 624.132955] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] self.force_reraise() [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] raise self.value [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] updated_port = self._update_port( [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] _ensure_no_port_binding_failure(port) [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] raise exception.PortBindingFailed(port_id=port['id']) [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] nova.exception.PortBindingFailed: Binding failed for port 44d69fe8-1077-4988-8ee0-a3208baaca5f, please check neutron logs for more information. [ 624.133288] env[62204]: ERROR nova.compute.manager [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] [ 624.133604] env[62204]: DEBUG nova.compute.utils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Binding failed for port 44d69fe8-1077-4988-8ee0-a3208baaca5f, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 624.137553] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.207s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.137972] env[62204]: INFO nova.compute.claims [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.142967] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Build of instance 571b574b-27f2-4e95-9309-fd3097fb4f64 was re-scheduled: Binding failed for port 44d69fe8-1077-4988-8ee0-a3208baaca5f, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 624.142967] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 624.142967] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Acquiring lock "refresh_cache-571b574b-27f2-4e95-9309-fd3097fb4f64" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 624.142967] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Acquired lock "refresh_cache-571b574b-27f2-4e95-9309-fd3097fb4f64" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.143237] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 624.164788] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 624.196768] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=<?>,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-08T23:34:16Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 624.197048] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 624.197229] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.197412] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 624.197555] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.197702] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 624.197905] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 624.198224] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 624.198421] env[62204]: DEBUG nova.virt.hardware [None 
req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 624.198583] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 624.198751] env[62204]: DEBUG nova.virt.hardware [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 624.203065] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4ba159-2eed-40fa-b946-3480b53a947a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.212107] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c1c685-3330-4717-8402-13bd702ea608 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.422984] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.666372] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.774098] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.031154] env[62204]: ERROR nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. 
[ 625.031154] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 625.031154] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.031154] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 625.031154] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 625.031154] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 625.031154] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 625.031154] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 625.031154] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.031154] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 625.031154] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.031154] env[62204]: ERROR nova.compute.manager raise self.value [ 625.031154] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 625.031154] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 625.031154] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.031154] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 625.031976] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.031976] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 625.031976] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. 
[ 625.031976] env[62204]: ERROR nova.compute.manager [ 625.031976] env[62204]: Traceback (most recent call last): [ 625.031976] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 625.031976] env[62204]: listener.cb(fileno) [ 625.031976] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.031976] env[62204]: result = function(*args, **kwargs) [ 625.031976] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 625.031976] env[62204]: return func(*args, **kwargs) [ 625.031976] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 625.031976] env[62204]: raise e [ 625.031976] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.031976] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 625.031976] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 625.031976] env[62204]: created_port_ids = self._update_ports_for_instance( [ 625.031976] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 625.031976] env[62204]: with excutils.save_and_reraise_exception(): [ 625.031976] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.031976] env[62204]: self.force_reraise() [ 625.031976] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.031976] env[62204]: raise self.value [ 625.031976] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 625.031976] env[62204]: updated_port = self._update_port( [ 625.031976] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.031976] env[62204]: _ensure_no_port_binding_failure(port) [ 625.031976] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.031976] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 625.033036] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. [ 625.033036] env[62204]: Removing descriptor: 14 [ 625.033036] env[62204]: ERROR nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. 
[ 625.033036] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Traceback (most recent call last): [ 625.033036] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 625.033036] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] yield resources [ 625.033036] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 625.033036] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self.driver.spawn(context, instance, image_meta, [ 625.033036] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 625.033036] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self._vmops.spawn(context, instance, image_meta, injected_files, [ 625.033036] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 625.033036] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] vm_ref = self.build_virtual_machine(instance, [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] vif_infos = vmwarevif.get_vif_info(self._session, [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] for vif in network_info: [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] return self._sync_wrapper(fn, *args, **kwargs) [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self.wait() [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self[:] = self._gt.wait() [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] return self._exit_event.wait() [ 625.033333] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 625.033637] env[62204]: ERROR 
nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] result = hub.switch() [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] return self.greenlet.switch() [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] result = function(*args, **kwargs) [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] return func(*args, **kwargs) [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] raise e [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] nwinfo = self.network_api.allocate_for_instance( [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 625.033637] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] created_port_ids = self._update_ports_for_instance( [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] with excutils.save_and_reraise_exception(): [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self.force_reraise() [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] raise self.value [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] updated_port = self._update_port( [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.033992] 
env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] _ensure_no_port_binding_failure(port) [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.033992] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] raise exception.PortBindingFailed(port_id=port['id']) [ 625.034276] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] nova.exception.PortBindingFailed: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. [ 625.034276] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] [ 625.034276] env[62204]: INFO nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Terminating instance [ 625.039093] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "refresh_cache-292b9c31-2ea1-4b28-8b60-79c6c80e1531" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.039093] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "refresh_cache-292b9c31-2ea1-4b28-8b60-79c6c80e1531" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.040260] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 625.054041] env[62204]: DEBUG nova.compute.manager [req-1d118a69-1e9c-4602-b64d-24c9aa6c1f17 req-1c1969b4-a1fe-4a19-b0dd-52b9857cbe83 service nova] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Received event network-changed-8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 625.054279] env[62204]: DEBUG nova.compute.manager [req-1d118a69-1e9c-4602-b64d-24c9aa6c1f17 req-1c1969b4-a1fe-4a19-b0dd-52b9857cbe83 service nova] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Refreshing instance network info cache due to event network-changed-8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 625.054433] env[62204]: DEBUG oslo_concurrency.lockutils [req-1d118a69-1e9c-4602-b64d-24c9aa6c1f17 req-1c1969b4-a1fe-4a19-b0dd-52b9857cbe83 service nova] Acquiring lock "refresh_cache-292b9c31-2ea1-4b28-8b60-79c6c80e1531" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.278828] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Releasing lock "refresh_cache-571b574b-27f2-4e95-9309-fd3097fb4f64" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.279273] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 625.279273] env[62204]: DEBUG nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 625.280104] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 625.312251] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.564906] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.668602] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83efe04-2154-447b-b71c-6d6f8212fa38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.676730] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f0058b-496a-434a-baa1-ed3fa120a9b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.706583] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a385a74-9c2c-4b90-92cf-6c0250af669c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.714084] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8341bc1-3ca1-4835-b8ac-b167518eabd3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.727409] env[62204]: DEBUG nova.compute.provider_tree [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.814156] env[62204]: DEBUG nova.network.neutron [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.878681] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.230114] env[62204]: DEBUG nova.scheduler.client.report [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 626.317361] env[62204]: INFO nova.compute.manager [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] [instance: 571b574b-27f2-4e95-9309-fd3097fb4f64] Took 1.04 seconds to deallocate network for instance. 
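The inventory record just above is what the resource tracker reports to Placement for provider 92e8f362-5134-40c6-9a5c-0b8f64197972. Under the usual Placement accounting, the schedulable capacity of each resource class is (total - reserved) * allocation_ratio; the short sketch below just reproduces that arithmetic for the values in this log (the helper itself is illustrative).

```python
# Reproduces the capacity arithmetic for the inventory reported above.
# capacity = (total - reserved) * allocation_ratio is the usual Placement accounting;
# the helper and variable names are illustrative.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}


def capacity(inv):
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']


for rc, inv in inventory.items():
    print(f"{rc}: {capacity(inv):g} schedulable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```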
[ 626.381608] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "refresh_cache-292b9c31-2ea1-4b28-8b60-79c6c80e1531" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 626.383541] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 626.383541] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 626.383541] env[62204]: DEBUG oslo_concurrency.lockutils [req-1d118a69-1e9c-4602-b64d-24c9aa6c1f17 req-1c1969b4-a1fe-4a19-b0dd-52b9857cbe83 service nova] Acquired lock "refresh_cache-292b9c31-2ea1-4b28-8b60-79c6c80e1531" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.384256] env[62204]: DEBUG nova.network.neutron [req-1d118a69-1e9c-4602-b64d-24c9aa6c1f17 req-1c1969b4-a1fe-4a19-b0dd-52b9857cbe83 service nova] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Refreshing network info cache for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 626.386258] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4306a53c-177b-419f-8764-247f0c059296 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.400707] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8cbb00-240a-4cba-923c-d9e22306c3a7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.425760] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 292b9c31-2ea1-4b28-8b60-79c6c80e1531 could not be found. [ 626.425999] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 626.426394] env[62204]: INFO nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Took 0.04 seconds to destroy the instance on the hypervisor. 
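The "Received event network-changed-8706abc8-..." records earlier in this sequence, and the network-info cache refresh for that port just above, are driven by Neutron notifying Nova through the os-server-external-events API. The payload sketched below uses values from this log; the field layout summarises that API from its documentation rather than from anything captured in this run.

```python
# Approximate shape of the Neutron -> Nova notification behind the
# "Received event network-changed-<port-id>" records. Values are taken from the log;
# the payload layout is a summary of the os-server-external-events API, not a capture.

external_event = {
    "events": [
        {
            "name": "network-changed",
            "server_uuid": "292b9c31-2ea1-4b28-8b60-79c6c80e1531",  # the instance
            "tag": "8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9",          # the Neutron port
        }
    ]
}

evt = external_event["events"][0]
# The compute manager logs such an event as "<name>-<tag>", matching the lines above.
print(f"Received event {evt['name']}-{evt['tag']}")
```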
[ 626.426714] env[62204]: DEBUG oslo.service.loopingcall [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 626.427508] env[62204]: DEBUG nova.compute.manager [-] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 626.427508] env[62204]: DEBUG nova.network.neutron [-] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 626.473733] env[62204]: DEBUG nova.network.neutron [-] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 626.737873] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.602s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.738679] env[62204]: DEBUG nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 626.741800] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.498s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.918598] env[62204]: DEBUG nova.network.neutron [req-1d118a69-1e9c-4602-b64d-24c9aa6c1f17 req-1c1969b4-a1fe-4a19-b0dd-52b9857cbe83 service nova] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 626.977534] env[62204]: DEBUG nova.network.neutron [-] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.984590] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.984590] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.039680] env[62204]: DEBUG nova.network.neutron [req-1d118a69-1e9c-4602-b64d-24c9aa6c1f17 req-1c1969b4-a1fe-4a19-b0dd-52b9857cbe83 service nova] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.169092] env[62204]: DEBUG nova.compute.manager [req-90ed2d18-b7f2-47b6-9ce2-5de03776a0b0 req-4ccec089-7361-4741-be73-8ff1936c0251 service nova] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Received event network-vif-deleted-8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 627.246480] env[62204]: DEBUG nova.compute.utils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 627.247933] env[62204]: DEBUG nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 627.248608] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 627.322586] env[62204]: DEBUG nova.policy [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '63cb260daaa8419a864b9822f1beaf64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed3da3ed958647d9bd110e7f167f1596', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 627.356145] env[62204]: INFO nova.scheduler.client.report [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Deleted allocations for instance 571b574b-27f2-4e95-9309-fd3097fb4f64 [ 627.485882] env[62204]: INFO nova.compute.manager [-] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Took 1.06 seconds to deallocate network for instance. [ 627.490638] env[62204]: DEBUG nova.compute.claims [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 627.490638] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.550018] env[62204]: DEBUG oslo_concurrency.lockutils [req-1d118a69-1e9c-4602-b64d-24c9aa6c1f17 req-1c1969b4-a1fe-4a19-b0dd-52b9857cbe83 service nova] Releasing lock "refresh_cache-292b9c31-2ea1-4b28-8b60-79c6c80e1531" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.698042] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafe61c1-e192-44f2-85a2-b5d1fe100b1c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.709750] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9670212f-332e-46cd-af9b-f28052270a69 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.744922] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f28c77-9559-4f15-ac6e-5ebc7e493977 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.752621] env[62204]: DEBUG 
nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 627.757135] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13593700-f099-41dc-b1ad-8064f812a47c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.773408] env[62204]: DEBUG nova.compute.provider_tree [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.844885] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Successfully created port: f31d329a-00eb-4234-8c40-0e7991448c15 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.868202] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7f123f69-ea22-4124-ba4e-b87e82de69b8 tempest-ServerDiagnosticsTest-20309774 tempest-ServerDiagnosticsTest-20309774-project-member] Lock "571b574b-27f2-4e95-9309-fd3097fb4f64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.117s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.280716] env[62204]: DEBUG nova.scheduler.client.report [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 628.371095] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 628.772776] env[62204]: DEBUG nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 628.787781] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.047s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.788754] env[62204]: ERROR nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9, please check neutron logs for more information. [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] Traceback (most recent call last): [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self.driver.spawn(context, instance, image_meta, [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] vm_ref = self.build_virtual_machine(instance, [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.788754] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] for vif in network_info: [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] return self._sync_wrapper(fn, *args, **kwargs) [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self.wait() [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 628.789385] env[62204]: ERROR 
nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self[:] = self._gt.wait() [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] return self._exit_event.wait() [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] result = hub.switch() [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 628.789385] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] return self.greenlet.switch() [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] result = function(*args, **kwargs) [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] return func(*args, **kwargs) [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] raise e [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] nwinfo = self.network_api.allocate_for_instance( [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] created_port_ids = self._update_ports_for_instance( [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] with excutils.save_and_reraise_exception(): [ 628.789989] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] self.force_reraise() [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] raise self.value [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] updated_port = self._update_port( [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] _ensure_no_port_binding_failure(port) [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] raise exception.PortBindingFailed(port_id=port['id']) [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] nova.exception.PortBindingFailed: Binding failed for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9, please check neutron logs for more information. [ 628.790612] env[62204]: ERROR nova.compute.manager [instance: 72514005-1023-4db6-9e51-9b0855083411] [ 628.791145] env[62204]: DEBUG nova.compute.utils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Binding failed for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 628.791145] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.700s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.792733] env[62204]: INFO nova.compute.claims [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.802504] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Build of instance 72514005-1023-4db6-9e51-9b0855083411 was re-scheduled: Binding failed for port d2a9edc0-fbb7-4e5c-a0be-bde57c6ef0c9, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 628.803067] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 628.803413] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Acquiring lock "refresh_cache-72514005-1023-4db6-9e51-9b0855083411" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.803413] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Acquired lock "refresh_cache-72514005-1023-4db6-9e51-9b0855083411" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.803615] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.818174] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 628.818543] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 628.818543] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.818612] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 628.818788] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.818895] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 628.820412] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 628.821070] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 628.821576] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 628.821939] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 628.822795] env[62204]: DEBUG nova.virt.hardware [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 628.824214] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6214a6-95d0-4e4c-8668-401d220ba36c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.840505] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad07755-a8ad-4624-955a-498494d63673 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.908177] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.317190] env[62204]: DEBUG nova.compute.manager [req-ddec5b67-1dfd-4524-b155-d34add3b0d14 
req-dee9b658-1379-408e-a6fd-92d740c6883a service nova] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Received event network-changed-f31d329a-00eb-4234-8c40-0e7991448c15 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 629.317541] env[62204]: DEBUG nova.compute.manager [req-ddec5b67-1dfd-4524-b155-d34add3b0d14 req-dee9b658-1379-408e-a6fd-92d740c6883a service nova] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Refreshing instance network info cache due to event network-changed-f31d329a-00eb-4234-8c40-0e7991448c15. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 629.317541] env[62204]: DEBUG oslo_concurrency.lockutils [req-ddec5b67-1dfd-4524-b155-d34add3b0d14 req-dee9b658-1379-408e-a6fd-92d740c6883a service nova] Acquiring lock "refresh_cache-25e4a40e-c5a6-47f5-9e53-00d3073252fc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.317725] env[62204]: DEBUG oslo_concurrency.lockutils [req-ddec5b67-1dfd-4524-b155-d34add3b0d14 req-dee9b658-1379-408e-a6fd-92d740c6883a service nova] Acquired lock "refresh_cache-25e4a40e-c5a6-47f5-9e53-00d3073252fc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.317810] env[62204]: DEBUG nova.network.neutron [req-ddec5b67-1dfd-4524-b155-d34add3b0d14 req-dee9b658-1379-408e-a6fd-92d740c6883a service nova] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Refreshing network info cache for port f31d329a-00eb-4234-8c40-0e7991448c15 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 629.349151] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.488273] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.658051] env[62204]: ERROR nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f31d329a-00eb-4234-8c40-0e7991448c15, please check neutron logs for more information. 
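The nova.virt.hardware records a little earlier show the CPU-topology search for the m1.nano flavor: one vCPU, no topology extra specs, and per-dimension maxima of 65536, which leaves VirtCPUTopology(cores=1,sockets=1,threads=1) as the only candidate. The brute-force sketch below illustrates that search; it is a simplification, not Nova's exact algorithm.

```python
# Simplified sketch of the CPU-topology search logged above: enumerate every
# (sockets, cores, threads) split of the flavor's vcpu count that respects the
# per-dimension maxima. Brute force for illustration only.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found


# The m1.nano flavor in this log has vcpus=1 and no hw: topology extra specs,
# so the only possible topology is 1 socket x 1 core x 1 thread.
print(possible_topologies(1))   # [(1, 1, 1)]
```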
[ 629.658051] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 629.658051] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.658051] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 629.658051] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.658051] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 629.658051] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.658051] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 629.658051] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.658051] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 629.658051] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.658051] env[62204]: ERROR nova.compute.manager raise self.value [ 629.658051] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.658051] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 629.658051] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.658051] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 629.658630] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.658630] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 629.658630] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f31d329a-00eb-4234-8c40-0e7991448c15, please check neutron logs for more information. 
[ 629.658630] env[62204]: ERROR nova.compute.manager [ 629.658630] env[62204]: Traceback (most recent call last): [ 629.658630] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 629.658630] env[62204]: listener.cb(fileno) [ 629.658630] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.658630] env[62204]: result = function(*args, **kwargs) [ 629.658630] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.658630] env[62204]: return func(*args, **kwargs) [ 629.658630] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 629.658630] env[62204]: raise e [ 629.658630] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.658630] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 629.658630] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.658630] env[62204]: created_port_ids = self._update_ports_for_instance( [ 629.658630] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.658630] env[62204]: with excutils.save_and_reraise_exception(): [ 629.658630] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.658630] env[62204]: self.force_reraise() [ 629.658630] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.658630] env[62204]: raise self.value [ 629.658630] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.658630] env[62204]: updated_port = self._update_port( [ 629.658630] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.658630] env[62204]: _ensure_no_port_binding_failure(port) [ 629.658630] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.658630] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 629.659307] env[62204]: nova.exception.PortBindingFailed: Binding failed for port f31d329a-00eb-4234-8c40-0e7991448c15, please check neutron logs for more information. [ 629.659307] env[62204]: Removing descriptor: 14 [ 629.659307] env[62204]: ERROR nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f31d329a-00eb-4234-8c40-0e7991448c15, please check neutron logs for more information. 
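The Acquiring/Acquired/Releasing lock records that recur throughout this section ("refresh_cache-<uuid>", "compute_resources", and the per-instance build locks) come from oslo_concurrency.lockutils. The sketch below shows the same pattern in isolation; the lock names and worker functions are made up for illustration.

```python
# Minimal illustration of the oslo.concurrency locking pattern behind the
# 'Acquiring lock ... / Lock ... acquired ... / released' records in this log.
# Lock names and worker functions here are made up for illustration.

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def instance_claim():
    # Serialised against other callers using the same lock name.
    pass


def refresh_cache(instance_uuid):
    # Nova-style per-instance critical section, e.g. "refresh_cache-<uuid>".
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        pass


instance_claim()
refresh_cache("292b9c31-2ea1-4b28-8b60-79c6c80e1531")
```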
[ 629.659307] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Traceback (most recent call last): [ 629.659307] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 629.659307] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] yield resources [ 629.659307] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 629.659307] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self.driver.spawn(context, instance, image_meta, [ 629.659307] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 629.659307] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 629.659307] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 629.659307] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] vm_ref = self.build_virtual_machine(instance, [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] vif_infos = vmwarevif.get_vif_info(self._session, [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] for vif in network_info: [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] return self._sync_wrapper(fn, *args, **kwargs) [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self.wait() [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self[:] = self._gt.wait() [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] return self._exit_event.wait() [ 629.659702] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 629.660100] env[62204]: ERROR 
nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] result = hub.switch() [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] return self.greenlet.switch() [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] result = function(*args, **kwargs) [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] return func(*args, **kwargs) [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] raise e [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] nwinfo = self.network_api.allocate_for_instance( [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 629.660100] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] created_port_ids = self._update_ports_for_instance( [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] with excutils.save_and_reraise_exception(): [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self.force_reraise() [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] raise self.value [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] updated_port = self._update_port( [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 629.660430] 
env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] _ensure_no_port_binding_failure(port) [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 629.660430] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] raise exception.PortBindingFailed(port_id=port['id']) [ 629.660754] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] nova.exception.PortBindingFailed: Binding failed for port f31d329a-00eb-4234-8c40-0e7991448c15, please check neutron logs for more information. [ 629.660754] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] [ 629.660754] env[62204]: INFO nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Terminating instance [ 629.663958] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Acquiring lock "refresh_cache-25e4a40e-c5a6-47f5-9e53-00d3073252fc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.859881] env[62204]: DEBUG nova.network.neutron [req-ddec5b67-1dfd-4524-b155-d34add3b0d14 req-dee9b658-1379-408e-a6fd-92d740c6883a service nova] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.978462] env[62204]: DEBUG nova.network.neutron [req-ddec5b67-1dfd-4524-b155-d34add3b0d14 req-dee9b658-1379-408e-a6fd-92d740c6883a service nova] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.991859] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Releasing lock "refresh_cache-72514005-1023-4db6-9e51-9b0855083411" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.992263] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 629.992465] env[62204]: DEBUG nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 629.992631] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 630.013697] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.256774] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42a86e7-dd91-4f36-80ee-0de7321b375c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.265694] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eade9e78-6a19-4fb0-bc79-d5cd6dc2a0bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.295489] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4728ab-e30b-4819-aa67-8dc4e8e8acaa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.304252] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2fcc3a-5120-46ec-97df-79737f0dcddb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.327220] env[62204]: DEBUG nova.compute.provider_tree [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.483473] env[62204]: DEBUG oslo_concurrency.lockutils [req-ddec5b67-1dfd-4524-b155-d34add3b0d14 req-dee9b658-1379-408e-a6fd-92d740c6883a service nova] Releasing lock "refresh_cache-25e4a40e-c5a6-47f5-9e53-00d3073252fc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.483936] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Acquired lock "refresh_cache-25e4a40e-c5a6-47f5-9e53-00d3073252fc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.484174] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 
tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 630.515592] env[62204]: DEBUG nova.network.neutron [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.832919] env[62204]: DEBUG nova.scheduler.client.report [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 631.012644] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 631.019881] env[62204]: INFO nova.compute.manager [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] [instance: 72514005-1023-4db6-9e51-9b0855083411] Took 1.03 seconds to deallocate network for instance. [ 631.338125] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.338669] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 631.345022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.121s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.393214] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.516843] env[62204]: DEBUG nova.compute.manager [req-b819fa57-c91d-4689-b0a5-c6df87797d72 req-69184cd9-262c-444a-b9ad-e9e420e765b3 service nova] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Received event network-vif-deleted-f31d329a-00eb-4234-8c40-0e7991448c15 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 631.848548] env[62204]: DEBUG nova.compute.utils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 631.860090] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 631.861019] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 631.897198] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Releasing lock "refresh_cache-25e4a40e-c5a6-47f5-9e53-00d3073252fc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 631.897680] env[62204]: DEBUG nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 631.897894] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 631.898193] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a08d17c-6fe9-45ac-8e5d-3bb1b699101f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.910674] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c0c684-cc47-417b-9b8e-46f518c1d574 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.944980] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 25e4a40e-c5a6-47f5-9e53-00d3073252fc could not be found. [ 631.945770] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 631.945770] env[62204]: INFO nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Took 0.05 seconds to destroy the instance on the hypervisor. [ 631.945770] env[62204]: DEBUG oslo.service.loopingcall [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.946512] env[62204]: DEBUG nova.compute.manager [-] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 631.946512] env[62204]: DEBUG nova.network.neutron [-] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 631.974878] env[62204]: DEBUG nova.network.neutron [-] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 631.986139] env[62204]: DEBUG nova.policy [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd103506c3798439896357b04736a43b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6054de8928a649f289eeb55fc544a8ee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 632.058384] env[62204]: INFO nova.scheduler.client.report [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Deleted allocations for instance 72514005-1023-4db6-9e51-9b0855083411 [ 632.361397] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 632.412547] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ee0177-9c31-436c-87cc-d4778e8fbc06 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.424163] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65654fc0-0a42-48bb-8307-4ad9a7e96969 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.472655] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bd1198-0ef9-4b50-b540-55dbf081b447 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.482690] env[62204]: DEBUG nova.network.neutron [-] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.484978] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160933e4-5245-4047-9370-a905726c8dff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.500722] env[62204]: DEBUG nova.compute.provider_tree [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.574457] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7352ad87-4d31-41da-b14f-853abd132df9 tempest-ServerActionsTestOtherA-1862396041 tempest-ServerActionsTestOtherA-1862396041-project-member] Lock "72514005-1023-4db6-9e51-9b0855083411" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.347s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.991108] env[62204]: INFO nova.compute.manager [-] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Took 1.04 seconds to deallocate network for instance. [ 632.996348] env[62204]: DEBUG nova.compute.claims [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 632.996348] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.004382] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Successfully created port: fcabcbf5-1604-4373-9e22-9cbaff73ef96 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 633.011688] env[62204]: DEBUG nova.scheduler.client.report [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 633.077623] env[62204]: DEBUG nova.compute.manager [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 633.381063] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 633.418094] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 633.418385] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 633.418576] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.419927] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 633.419927] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.419927] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 633.419927] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 633.419927] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 633.420106] env[62204]: DEBUG 
nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 633.420106] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 633.420313] env[62204]: DEBUG nova.virt.hardware [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 633.421611] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e549ec2-2aaf-4cea-a7f5-8aa67e0f86b5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.430023] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21784be2-de35-4b3b-adc3-17a39adce47c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.517874] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.176s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.518592] env[62204]: ERROR nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port aa5a81de-d840-4752-8820-0cc5cfdb186f, please check neutron logs for more information. 
[ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Traceback (most recent call last): [ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self.driver.spawn(context, instance, image_meta, [ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] vm_ref = self.build_virtual_machine(instance, [ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] vif_infos = vmwarevif.get_vif_info(self._session, [ 633.518592] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] for vif in network_info: [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] return self._sync_wrapper(fn, *args, **kwargs) [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self.wait() [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self[:] = self._gt.wait() [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] return self._exit_event.wait() [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] result = hub.switch() [ 633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
633.518986] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] return self.greenlet.switch() [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] result = function(*args, **kwargs) [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] return func(*args, **kwargs) [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] raise e [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] nwinfo = self.network_api.allocate_for_instance( [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] created_port_ids = self._update_ports_for_instance( [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] with excutils.save_and_reraise_exception(): [ 633.519371] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] self.force_reraise() [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] raise self.value [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] updated_port = self._update_port( [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] _ensure_no_port_binding_failure(port) [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] raise exception.PortBindingFailed(port_id=port['id']) [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] nova.exception.PortBindingFailed: Binding failed for port aa5a81de-d840-4752-8820-0cc5cfdb186f, please check neutron logs for more information. [ 633.519762] env[62204]: ERROR nova.compute.manager [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] [ 633.520141] env[62204]: DEBUG nova.compute.utils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Binding failed for port aa5a81de-d840-4752-8820-0cc5cfdb186f, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 633.520606] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.935s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 633.523616] env[62204]: DEBUG nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Build of instance 47409cd0-db33-4a94-b806-1799a6f7e98f was re-scheduled: Binding failed for port aa5a81de-d840-4752-8820-0cc5cfdb186f, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 633.524100] env[62204]: DEBUG nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 633.524309] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Acquiring lock "refresh_cache-47409cd0-db33-4a94-b806-1799a6f7e98f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 633.524486] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Acquired lock "refresh_cache-47409cd0-db33-4a94-b806-1799a6f7e98f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.524660] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 633.609437] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.057675] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.201630] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.461162] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb94660-fc40-465e-bd53-5146dd82f448 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.469882] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7a65c6-1b2d-4e5a-876b-35ea5e346e0e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.506660] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1021926-95c6-44d6-9915-8aceec97190c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.514281] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09022773-bb42-43d3-9cbd-fd262c887374 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.529416] env[62204]: DEBUG nova.compute.provider_tree [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.618026] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquiring lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.618026] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.704207] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Releasing lock "refresh_cache-47409cd0-db33-4a94-b806-1799a6f7e98f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.705310] env[62204]: DEBUG nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Virt driver does not 
provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 634.705310] env[62204]: DEBUG nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 634.705310] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 634.727126] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.033064] env[62204]: DEBUG nova.scheduler.client.report [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 635.077174] env[62204]: DEBUG nova.compute.manager [req-59421037-2f6b-4226-8c2b-1875264bf19f req-73ebc79f-4642-49da-8d1c-041a1a22082d service nova] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Received event network-changed-fcabcbf5-1604-4373-9e22-9cbaff73ef96 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 635.078201] env[62204]: DEBUG nova.compute.manager [req-59421037-2f6b-4226-8c2b-1875264bf19f req-73ebc79f-4642-49da-8d1c-041a1a22082d service nova] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Refreshing instance network info cache due to event network-changed-fcabcbf5-1604-4373-9e22-9cbaff73ef96. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 635.078201] env[62204]: DEBUG oslo_concurrency.lockutils [req-59421037-2f6b-4226-8c2b-1875264bf19f req-73ebc79f-4642-49da-8d1c-041a1a22082d service nova] Acquiring lock "refresh_cache-f5a11b74-e1fe-44c0-bccb-5817bc582608" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.078201] env[62204]: DEBUG oslo_concurrency.lockutils [req-59421037-2f6b-4226-8c2b-1875264bf19f req-73ebc79f-4642-49da-8d1c-041a1a22082d service nova] Acquired lock "refresh_cache-f5a11b74-e1fe-44c0-bccb-5817bc582608" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.078201] env[62204]: DEBUG nova.network.neutron [req-59421037-2f6b-4226-8c2b-1875264bf19f req-73ebc79f-4642-49da-8d1c-041a1a22082d service nova] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Refreshing network info cache for port fcabcbf5-1604-4373-9e22-9cbaff73ef96 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.232588] env[62204]: DEBUG nova.network.neutron [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.332076] env[62204]: ERROR nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fcabcbf5-1604-4373-9e22-9cbaff73ef96, please check neutron logs for more information. 
[ 635.332076] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 635.332076] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.332076] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 635.332076] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.332076] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 635.332076] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.332076] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 635.332076] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.332076] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 635.332076] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.332076] env[62204]: ERROR nova.compute.manager raise self.value [ 635.332076] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.332076] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 635.332076] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.332076] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 635.332852] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.332852] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 635.332852] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fcabcbf5-1604-4373-9e22-9cbaff73ef96, please check neutron logs for more information. 
[ 635.332852] env[62204]: ERROR nova.compute.manager [ 635.332852] env[62204]: Traceback (most recent call last): [ 635.332852] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 635.332852] env[62204]: listener.cb(fileno) [ 635.332852] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.332852] env[62204]: result = function(*args, **kwargs) [ 635.332852] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.332852] env[62204]: return func(*args, **kwargs) [ 635.332852] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.332852] env[62204]: raise e [ 635.332852] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.332852] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 635.332852] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.332852] env[62204]: created_port_ids = self._update_ports_for_instance( [ 635.332852] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.332852] env[62204]: with excutils.save_and_reraise_exception(): [ 635.332852] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.332852] env[62204]: self.force_reraise() [ 635.332852] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.332852] env[62204]: raise self.value [ 635.332852] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.332852] env[62204]: updated_port = self._update_port( [ 635.332852] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.332852] env[62204]: _ensure_no_port_binding_failure(port) [ 635.332852] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.332852] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 635.333556] env[62204]: nova.exception.PortBindingFailed: Binding failed for port fcabcbf5-1604-4373-9e22-9cbaff73ef96, please check neutron logs for more information. [ 635.333556] env[62204]: Removing descriptor: 14 [ 635.333556] env[62204]: ERROR nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fcabcbf5-1604-4373-9e22-9cbaff73ef96, please check neutron logs for more information. 
[ 635.333556] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Traceback (most recent call last): [ 635.333556] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 635.333556] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] yield resources [ 635.333556] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 635.333556] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self.driver.spawn(context, instance, image_meta, [ 635.333556] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 635.333556] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.333556] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.333556] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] vm_ref = self.build_virtual_machine(instance, [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] for vif in network_info: [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] return self._sync_wrapper(fn, *args, **kwargs) [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self.wait() [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self[:] = self._gt.wait() [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] return self._exit_event.wait() [ 635.333845] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.334193] env[62204]: ERROR 
nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] result = hub.switch() [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] return self.greenlet.switch() [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] result = function(*args, **kwargs) [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] return func(*args, **kwargs) [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] raise e [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] nwinfo = self.network_api.allocate_for_instance( [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.334193] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] created_port_ids = self._update_ports_for_instance( [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] with excutils.save_and_reraise_exception(): [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self.force_reraise() [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] raise self.value [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] updated_port = self._update_port( [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.334546] 
env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] _ensure_no_port_binding_failure(port) [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 635.334546] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] raise exception.PortBindingFailed(port_id=port['id']) [ 635.334831] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] nova.exception.PortBindingFailed: Binding failed for port fcabcbf5-1604-4373-9e22-9cbaff73ef96, please check neutron logs for more information. [ 635.334831] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] [ 635.334831] env[62204]: INFO nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Terminating instance [ 635.336461] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "refresh_cache-f5a11b74-e1fe-44c0-bccb-5817bc582608" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.540021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.019s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.544168] env[62204]: ERROR nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e16e7a81-36b8-4855-a5a7-6de05aced016, please check neutron logs for more information. 
[ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Traceback (most recent call last): [ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self.driver.spawn(context, instance, image_meta, [ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self._vmops.spawn(context, instance, image_meta, injected_files, [ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] vm_ref = self.build_virtual_machine(instance, [ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] vif_infos = vmwarevif.get_vif_info(self._session, [ 635.544168] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] for vif in network_info: [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] return self._sync_wrapper(fn, *args, **kwargs) [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self.wait() [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self[:] = self._gt.wait() [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] return self._exit_event.wait() [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] result = hub.switch() [ 635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
635.544545] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] return self.greenlet.switch() [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] result = function(*args, **kwargs) [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] return func(*args, **kwargs) [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] raise e [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] nwinfo = self.network_api.allocate_for_instance( [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] created_port_ids = self._update_ports_for_instance( [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] with excutils.save_and_reraise_exception(): [ 635.544915] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] self.force_reraise() [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] raise self.value [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] updated_port = self._update_port( [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] _ensure_no_port_binding_failure(port) [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] raise exception.PortBindingFailed(port_id=port['id']) [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] nova.exception.PortBindingFailed: Binding failed for port e16e7a81-36b8-4855-a5a7-6de05aced016, please check neutron logs for more information. [ 635.545342] env[62204]: ERROR nova.compute.manager [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] [ 635.545665] env[62204]: DEBUG nova.compute.utils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Binding failed for port e16e7a81-36b8-4855-a5a7-6de05aced016, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 635.548480] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.218s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.548480] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.548480] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62204) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 635.548480] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.884s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 635.556815] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Build of instance d52bfb49-beb0-4bfe-b3bb-45132c210065 was re-scheduled: Binding failed for port e16e7a81-36b8-4855-a5a7-6de05aced016, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 635.556815] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 635.556815] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquiring lock "refresh_cache-d52bfb49-beb0-4bfe-b3bb-45132c210065" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 635.556815] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Acquired lock "refresh_cache-d52bfb49-beb0-4bfe-b3bb-45132c210065" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 635.556972] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 635.556972] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc61cfbe-daa5-4d96-8b3d-5125d34ac86a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.569018] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de36258a-8a45-4c5b-85ce-ccd8bf45fa9e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.586720] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070c2491-8ed2-4efc-b4c4-103709430cf7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.595465] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b436fda-a07e-44bd-a8c0-d84e652fe92a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.636187] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181200MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62204) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 635.636187] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.636187] env[62204]: DEBUG nova.network.neutron [req-59421037-2f6b-4226-8c2b-1875264bf19f req-73ebc79f-4642-49da-8d1c-041a1a22082d service nova] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] 
Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 635.738841] env[62204]: INFO nova.compute.manager [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] [instance: 47409cd0-db33-4a94-b806-1799a6f7e98f] Took 1.03 seconds to deallocate network for instance. [ 635.843218] env[62204]: DEBUG nova.network.neutron [req-59421037-2f6b-4226-8c2b-1875264bf19f req-73ebc79f-4642-49da-8d1c-041a1a22082d service nova] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.091800] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 636.250033] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.346243] env[62204]: DEBUG oslo_concurrency.lockutils [req-59421037-2f6b-4226-8c2b-1875264bf19f req-73ebc79f-4642-49da-8d1c-041a1a22082d service nova] Releasing lock "refresh_cache-f5a11b74-e1fe-44c0-bccb-5817bc582608" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.346660] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquired lock "refresh_cache-f5a11b74-e1fe-44c0-bccb-5817bc582608" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.346838] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 636.512159] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc34713-bd80-4d00-b1a2-35b8c5bc89c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.520236] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce919537-e9d5-4b32-8662-35a87491d4e3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.557353] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c81c066-5ca1-4db4-83e1-480b048fe234 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.568672] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6dd5e3ab-1243-46cc-ac14-9ac831d76524 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.581459] env[62204]: DEBUG nova.compute.provider_tree [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.757793] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Releasing lock "refresh_cache-d52bfb49-beb0-4bfe-b3bb-45132c210065" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 636.757793] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 636.757793] env[62204]: DEBUG nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 636.757793] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 636.783470] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 636.831644] env[62204]: INFO nova.scheduler.client.report [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Deleted allocations for instance 47409cd0-db33-4a94-b806-1799a6f7e98f [ 636.892233] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.084820] env[62204]: DEBUG nova.scheduler.client.report [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 637.118323] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.286496] env[62204]: DEBUG nova.network.neutron [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.344328] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b88cda17-03e2-410d-a0a2-0d87f9f0a215 tempest-AttachInterfacesUnderV243Test-1893097310 tempest-AttachInterfacesUnderV243Test-1893097310-project-member] Lock "47409cd0-db33-4a94-b806-1799a6f7e98f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.390s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.591756] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.043s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.592859] env[62204]: ERROR nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a22305f5-bf81-44e2-8ff5-08574b1b4374, please check neutron logs for more information. 
[ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Traceback (most recent call last): [ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self.driver.spawn(context, instance, image_meta, [ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] vm_ref = self.build_virtual_machine(instance, [ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] vif_infos = vmwarevif.get_vif_info(self._session, [ 637.592859] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] for vif in network_info: [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] return self._sync_wrapper(fn, *args, **kwargs) [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self.wait() [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self[:] = self._gt.wait() [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] return self._exit_event.wait() [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] result = hub.switch() [ 637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
637.593177] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] return self.greenlet.switch() [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] result = function(*args, **kwargs) [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] return func(*args, **kwargs) [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] raise e [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] nwinfo = self.network_api.allocate_for_instance( [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] created_port_ids = self._update_ports_for_instance( [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] with excutils.save_and_reraise_exception(): [ 637.593482] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] self.force_reraise() [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] raise self.value [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] updated_port = self._update_port( [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] _ensure_no_port_binding_failure(port) [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] raise exception.PortBindingFailed(port_id=port['id']) [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] nova.exception.PortBindingFailed: Binding failed for port a22305f5-bf81-44e2-8ff5-08574b1b4374, please check neutron logs for more information. [ 637.593860] env[62204]: ERROR nova.compute.manager [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] [ 637.594160] env[62204]: DEBUG nova.compute.utils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Binding failed for port a22305f5-bf81-44e2-8ff5-08574b1b4374, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 637.596442] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.286s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.598917] env[62204]: INFO nova.compute.claims [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.603445] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Build of instance 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf was re-scheduled: Binding failed for port a22305f5-bf81-44e2-8ff5-08574b1b4374, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 637.604170] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 637.604509] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "refresh_cache-0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.604781] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "refresh_cache-0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.605115] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 637.621316] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Releasing lock "refresh_cache-f5a11b74-e1fe-44c0-bccb-5817bc582608" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.621937] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 637.622290] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 637.623094] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d8c3c5c8-3b6c-4d5b-8852-c7e6193b3d8a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.634646] env[62204]: DEBUG nova.compute.manager [req-05dcbe6e-866f-4087-a061-79cade98c5ca req-5e6a4d80-53c1-46be-84f4-8ad81b0aefb7 service nova] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Received event network-vif-deleted-fcabcbf5-1604-4373-9e22-9cbaff73ef96 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 637.648503] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fc3f82-2b9d-4ac3-841e-60cbf6c74902 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.686525] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f5a11b74-e1fe-44c0-bccb-5817bc582608 could not be found. [ 637.686917] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 637.687200] env[62204]: INFO nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Took 0.06 seconds to destroy the instance on the hypervisor. [ 637.687580] env[62204]: DEBUG oslo.service.loopingcall [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 637.688249] env[62204]: DEBUG nova.compute.manager [-] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 637.689206] env[62204]: DEBUG nova.network.neutron [-] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 637.733322] env[62204]: DEBUG nova.network.neutron [-] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.790864] env[62204]: INFO nova.compute.manager [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] [instance: d52bfb49-beb0-4bfe-b3bb-45132c210065] Took 1.04 seconds to deallocate network for instance. [ 637.849089] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 638.141293] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.243654] env[62204]: DEBUG nova.network.neutron [-] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.325091] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.363276] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "2727dc46-98ed-435d-89ef-41bc20cda776" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.363903] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.384865] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.746995] env[62204]: INFO nova.compute.manager [-] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Took 1.06 seconds to deallocate network for instance. 
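The PortBindingFailed tracebacks above all end in the same place: Nova asks Neutron to update the port, sees binding:vif_type come back as binding_failed, and raises. The sketch below is an illustrative approximation of that kind of check, plus a hypothetical openstacksdk helper for inspecting the offending port; the cloud name and helper name are assumptions for illustration, not Nova's verbatim code or anything taken from this log.

```python
# Illustrative sketch only -- approximates the check these tracebacks end in
# (nova.network.neutron._ensure_no_port_binding_failure); not Nova's verbatim code.
import openstack  # openstacksdk

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""


def ensure_no_port_binding_failure(port: dict) -> None:
    # Neutron reports the binding outcome in 'binding:vif_type'; the value
    # 'binding_failed' means no mechanism driver could bind the port on the host.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(f"Binding failed for port {port['id']}")


def inspect_port(port_id: str, cloud: str = 'devstack-admin') -> None:
    # Hypothetical troubleshooting helper: fetch the port and print the fields
    # that usually explain a binding failure (host, vnic type, vif type).
    conn = openstack.connect(cloud=cloud)   # cloud name is an assumption
    port = conn.network.get_port(port_id)
    print(port.id, port.binding_host_id, port.binding_vnic_type, port.binding_vif_type)


# e.g. inspect_port('fcabcbf5-1604-4373-9e22-9cbaff73ef96')  # port id from the trace above
```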
[ 638.748151] env[62204]: DEBUG nova.compute.claims [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 638.748344] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 638.831776] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "refresh_cache-0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.832071] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 638.832307] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.832476] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 638.839494] env[62204]: INFO nova.scheduler.client.report [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Deleted allocations for instance d52bfb49-beb0-4bfe-b3bb-45132c210065 [ 638.863983] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.294482] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cca1be-9a52-48b5-b0bf-1842bcc7ab6b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.302221] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a1e861-df68-41a3-ba0b-5f3ad9cfd705 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.343902] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6426c3c-d6f1-4b4a-b944-10c946171f37 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.352972] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974b6106-487e-4589-85eb-4820800d2264 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.358486] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4356007f-6b3a-4b04-9031-824316378f0f tempest-MigrationsAdminTest-222504242 tempest-MigrationsAdminTest-222504242-project-member] Lock "d52bfb49-beb0-4bfe-b3bb-45132c210065" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.447s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 639.370190] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.372349] env[62204]: DEBUG nova.compute.provider_tree [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.861205] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 639.876782] env[62204]: INFO nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf] Took 1.04 seconds to deallocate network for instance. 
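The recurring "Acquiring lock ... / Lock ... acquired by ... waited N s / Lock ... "released" by ... held N s" DEBUG lines are emitted by oslo.concurrency around named in-process locks such as "compute_resources" and the per-instance build locks. A minimal sketch of that usage pattern, not Nova's actual code:

```python
# Minimal sketch of the oslo.concurrency usage behind the
# "Acquiring lock ... / acquired ... waited X / released ... held Y" lines.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_available_resource():
    # Decorator form: every caller serializes on the 'compute_resources' lock,
    # the way the resource tracker guards claims, aborts and audits.
    ...


def locked_do_build_and_run_instance(instance_uuid):
    # Context-manager form: "Acquiring lock" is logged before entry, "acquired
    # ... waited" on entry, and "released ... held" on exit, which is where the
    # long hold times (e.g. 100.447s for a full build attempt) show up.
    with lockutils.lock(instance_uuid):
        ...
```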
[ 639.882228] env[62204]: DEBUG nova.scheduler.client.report [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 640.387438] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.395020] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.395020] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 640.395020] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.973s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.400831] env[62204]: INFO nova.compute.claims [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 640.907061] env[62204]: DEBUG nova.compute.utils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 640.916020] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 640.916020] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 640.944295] env[62204]: INFO nova.scheduler.client.report [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Deleted allocations for instance 0b090f00-8eb5-435f-a1ca-05fa5acdeaaf [ 641.092660] env[62204]: DEBUG nova.policy [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '370d4b8a24b84bf0a626d056c7758863', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb9a24ef26c74781a2ad36e3430ce630', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 641.416837] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 641.457709] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "0b090f00-8eb5-435f-a1ca-05fa5acdeaaf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.874s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 641.647232] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Successfully created port: 51133827-fa1c-4881-8008-e3e8a2aef9e6 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 641.894927] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e94190f-015a-4253-b0ea-3bdfb90e0bb3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.906793] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52dec70d-444e-4289-8d6b-05c2cf8bee13 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.951095] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f28659-7708-4a92-8906-08541fc70887 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.956441] env[62204]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5640ba-7780-462d-aa96-d94d99feca41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.970527] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 641.975020] env[62204]: DEBUG nova.compute.provider_tree [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.450063] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 642.467554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquiring lock "55d1649c-5eff-4264-bce1-dd907f9531f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.467771] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "55d1649c-5eff-4264-bce1-dd907f9531f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.478976] env[62204]: DEBUG nova.scheduler.client.report [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 642.496833] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 642.496833] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 642.496833] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.497065] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 642.497811] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.497811] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 642.498454] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 642.498634] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 642.499611] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 642.499844] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 642.500050] env[62204]: DEBUG nova.virt.hardware [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Sorted 
desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 642.501125] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e4b443-2d80-4db2-a50c-ca9fb4fddd47 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.504947] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.512368] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc98c2ed-5cef-408f-a8c9-31c1378150fc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.892620] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.894020] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.985473] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.591s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.986243] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 642.991490] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.500s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.163889] env[62204]: ERROR nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 51133827-fa1c-4881-8008-e3e8a2aef9e6, please check neutron logs for more information. [ 643.163889] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 643.163889] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 643.163889] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 643.163889] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 643.163889] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 643.163889] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 643.163889] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 643.163889] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.163889] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 643.163889] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.163889] env[62204]: ERROR nova.compute.manager raise self.value [ 643.163889] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 643.163889] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 643.163889] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.163889] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 643.164398] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.164398] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 643.164398] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 51133827-fa1c-4881-8008-e3e8a2aef9e6, please check neutron logs for more information. 
[ 643.164398] env[62204]: ERROR nova.compute.manager [ 643.164398] env[62204]: Traceback (most recent call last): [ 643.164398] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 643.164398] env[62204]: listener.cb(fileno) [ 643.164398] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.164398] env[62204]: result = function(*args, **kwargs) [ 643.164398] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.164398] env[62204]: return func(*args, **kwargs) [ 643.164398] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 643.164398] env[62204]: raise e [ 643.164398] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 643.164398] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 643.164398] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 643.164398] env[62204]: created_port_ids = self._update_ports_for_instance( [ 643.164398] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 643.164398] env[62204]: with excutils.save_and_reraise_exception(): [ 643.164398] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.164398] env[62204]: self.force_reraise() [ 643.164398] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.164398] env[62204]: raise self.value [ 643.164398] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 643.164398] env[62204]: updated_port = self._update_port( [ 643.164398] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.164398] env[62204]: _ensure_no_port_binding_failure(port) [ 643.164398] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.164398] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 643.165106] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 51133827-fa1c-4881-8008-e3e8a2aef9e6, please check neutron logs for more information. [ 643.165106] env[62204]: Removing descriptor: 14 [ 643.165106] env[62204]: ERROR nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 51133827-fa1c-4881-8008-e3e8a2aef9e6, please check neutron logs for more information. 
[ 643.165106] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Traceback (most recent call last): [ 643.165106] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 643.165106] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] yield resources [ 643.165106] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 643.165106] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self.driver.spawn(context, instance, image_meta, [ 643.165106] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 643.165106] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 643.165106] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 643.165106] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] vm_ref = self.build_virtual_machine(instance, [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] vif_infos = vmwarevif.get_vif_info(self._session, [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] for vif in network_info: [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] return self._sync_wrapper(fn, *args, **kwargs) [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self.wait() [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self[:] = self._gt.wait() [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] return self._exit_event.wait() [ 643.165411] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 643.165730] env[62204]: ERROR 
nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] result = hub.switch() [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] return self.greenlet.switch() [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] result = function(*args, **kwargs) [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] return func(*args, **kwargs) [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] raise e [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] nwinfo = self.network_api.allocate_for_instance( [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 643.165730] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] created_port_ids = self._update_ports_for_instance( [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] with excutils.save_and_reraise_exception(): [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self.force_reraise() [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] raise self.value [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] updated_port = self._update_port( [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.166050] 
env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] _ensure_no_port_binding_failure(port) [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.166050] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] raise exception.PortBindingFailed(port_id=port['id']) [ 643.166353] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] nova.exception.PortBindingFailed: Binding failed for port 51133827-fa1c-4881-8008-e3e8a2aef9e6, please check neutron logs for more information. [ 643.166353] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] [ 643.166353] env[62204]: INFO nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Terminating instance [ 643.168212] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "refresh_cache-5e264b99-8025-471a-bc6b-238f1bca054c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.168212] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "refresh_cache-5e264b99-8025-471a-bc6b-238f1bca054c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.168212] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 643.175291] env[62204]: DEBUG nova.compute.manager [req-de6af88f-efa1-479c-b1e4-b686ff3e1f4f req-ce6211b7-bda3-4699-86db-3fc0bd06d205 service nova] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Received event network-changed-51133827-fa1c-4881-8008-e3e8a2aef9e6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 643.175615] env[62204]: DEBUG nova.compute.manager [req-de6af88f-efa1-479c-b1e4-b686ff3e1f4f req-ce6211b7-bda3-4699-86db-3fc0bd06d205 service nova] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Refreshing instance network info cache due to event network-changed-51133827-fa1c-4881-8008-e3e8a2aef9e6. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 643.175714] env[62204]: DEBUG oslo_concurrency.lockutils [req-de6af88f-efa1-479c-b1e4-b686ff3e1f4f req-ce6211b7-bda3-4699-86db-3fc0bd06d205 service nova] Acquiring lock "refresh_cache-5e264b99-8025-471a-bc6b-238f1bca054c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.498731] env[62204]: DEBUG nova.compute.utils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 643.500271] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 643.500690] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 643.586976] env[62204]: DEBUG nova.policy [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2d362bd42d0453d8642bfe890c3f6d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '890d20acb3a849f4902354b481adfeab', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 643.699197] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.817054] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.962754] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb543d4-201e-486a-a7ee-9d0759f3d159 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.971408] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c115363-fdeb-4b60-a3d4-fb34beb87388 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.004339] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5351dd2-b380-4357-8846-151df9c88873 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.011372] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 644.015159] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df63b39-bde4-439d-ab82-9a6326c9635f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.030226] env[62204]: DEBUG nova.compute.provider_tree [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.321084] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "refresh_cache-5e264b99-8025-471a-bc6b-238f1bca054c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.321514] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 644.321553] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 644.324386] env[62204]: DEBUG oslo_concurrency.lockutils [req-de6af88f-efa1-479c-b1e4-b686ff3e1f4f req-ce6211b7-bda3-4699-86db-3fc0bd06d205 service nova] Acquired lock "refresh_cache-5e264b99-8025-471a-bc6b-238f1bca054c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.324599] env[62204]: DEBUG nova.network.neutron [req-de6af88f-efa1-479c-b1e4-b686ff3e1f4f req-ce6211b7-bda3-4699-86db-3fc0bd06d205 service nova] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Refreshing network info cache for port 51133827-fa1c-4881-8008-e3e8a2aef9e6 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 644.326788] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52752337-9f07-4d28-a34d-b764340b2360 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.338639] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9d4eb9-6873-4f7a-b0c3-59195d7f95e7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.363642] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5e264b99-8025-471a-bc6b-238f1bca054c could not be found. [ 644.364116] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 644.365731] env[62204]: INFO nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 644.365731] env[62204]: DEBUG oslo.service.loopingcall [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 644.365731] env[62204]: DEBUG nova.compute.manager [-] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 644.365731] env[62204]: DEBUG nova.network.neutron [-] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 644.380193] env[62204]: DEBUG nova.network.neutron [-] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.527985] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Successfully created port: 20f60ef7-c150-466f-a93f-3f1727cfb3a6 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 644.533541] env[62204]: DEBUG nova.scheduler.client.report [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 644.852705] env[62204]: DEBUG nova.network.neutron [req-de6af88f-efa1-479c-b1e4-b686ff3e1f4f req-ce6211b7-bda3-4699-86db-3fc0bd06d205 service nova] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.883397] env[62204]: DEBUG nova.network.neutron [-] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.948545] env[62204]: DEBUG nova.network.neutron [req-de6af88f-efa1-479c-b1e4-b686ff3e1f4f req-ce6211b7-bda3-4699-86db-3fc0bd06d205 service nova] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.036376] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 645.045068] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.052s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.045068] env[62204]: ERROR nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. [ 645.045068] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Traceback (most recent call last): [ 645.045068] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 645.045068] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self.driver.spawn(context, instance, image_meta, [ 645.045068] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 645.045068] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.045068] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.045068] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] vm_ref = self.build_virtual_machine(instance, [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] for vif in network_info: [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] return self._sync_wrapper(fn, *args, **kwargs) [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self.wait() [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 645.045388] env[62204]: ERROR 
nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self[:] = self._gt.wait() [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] return self._exit_event.wait() [ 645.045388] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] result = hub.switch() [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] return self.greenlet.switch() [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] result = function(*args, **kwargs) [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] return func(*args, **kwargs) [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] raise e [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] nwinfo = self.network_api.allocate_for_instance( [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.045749] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] created_port_ids = self._update_ports_for_instance( [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] with excutils.save_and_reraise_exception(): [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] self.force_reraise() [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] raise self.value [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] updated_port = self._update_port( [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] _ensure_no_port_binding_failure(port) [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.046075] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] raise exception.PortBindingFailed(port_id=port['id']) [ 645.046363] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] nova.exception.PortBindingFailed: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. [ 645.046363] env[62204]: ERROR nova.compute.manager [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] [ 645.046363] env[62204]: DEBUG nova.compute.utils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 645.047374] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.139s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.049806] env[62204]: INFO nova.compute.claims [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.054372] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Build of instance 292b9c31-2ea1-4b28-8b60-79c6c80e1531 was re-scheduled: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 645.054939] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 645.055221] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "refresh_cache-292b9c31-2ea1-4b28-8b60-79c6c80e1531" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.055393] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "refresh_cache-292b9c31-2ea1-4b28-8b60-79c6c80e1531" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.055556] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 645.070665] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 645.070665] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 645.070665] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 645.070817] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 645.071383] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 645.071383] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 645.071383] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 645.071383] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 645.071664] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 645.071664] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 645.071830] env[62204]: DEBUG nova.virt.hardware [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 645.072968] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3613c662-f6ba-426f-bcbe-4048f2e39675 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.081315] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5504b1-cf75-496d-8377-3b7ff21e81e4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.316330] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "703bf0c4-9bff-4967-8e84-09969b32b5a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.317112] env[62204]: DEBUG 
oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "703bf0c4-9bff-4967-8e84-09969b32b5a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.351410] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.351651] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.380295] env[62204]: DEBUG nova.compute.manager [req-f55f9851-8a87-4d87-8ec0-88cb285c48f4 req-b7107810-c3b5-412c-8854-80dd2f2311b1 service nova] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Received event network-vif-deleted-51133827-fa1c-4881-8008-e3e8a2aef9e6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 645.387262] env[62204]: INFO nova.compute.manager [-] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Took 1.02 seconds to deallocate network for instance. 
[ 645.391646] env[62204]: DEBUG nova.compute.claims [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 645.391820] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.450693] env[62204]: DEBUG oslo_concurrency.lockutils [req-de6af88f-efa1-479c-b1e4-b686ff3e1f4f req-ce6211b7-bda3-4699-86db-3fc0bd06d205 service nova] Releasing lock "refresh_cache-5e264b99-8025-471a-bc6b-238f1bca054c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.560424] env[62204]: DEBUG nova.compute.utils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Can not refresh info_cache because instance was not found {{(pid=62204) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 645.582247] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.677558] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.179794] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "refresh_cache-292b9c31-2ea1-4b28-8b60-79c6c80e1531" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.180054] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 646.180054] env[62204]: DEBUG nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 646.180597] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 646.200684] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 646.238632] env[62204]: ERROR nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6, please check neutron logs for more information. [ 646.238632] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 646.238632] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.238632] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 646.238632] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 646.238632] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 646.238632] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 646.238632] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 646.238632] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.238632] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 646.238632] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.238632] env[62204]: ERROR nova.compute.manager raise self.value [ 646.238632] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 646.238632] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 646.238632] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.238632] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 646.239865] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.239865] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 
646.239865] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6, please check neutron logs for more information. [ 646.239865] env[62204]: ERROR nova.compute.manager [ 646.239865] env[62204]: Traceback (most recent call last): [ 646.239865] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 646.239865] env[62204]: listener.cb(fileno) [ 646.239865] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.239865] env[62204]: result = function(*args, **kwargs) [ 646.239865] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 646.239865] env[62204]: return func(*args, **kwargs) [ 646.239865] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 646.239865] env[62204]: raise e [ 646.239865] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.239865] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 646.239865] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 646.239865] env[62204]: created_port_ids = self._update_ports_for_instance( [ 646.239865] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 646.239865] env[62204]: with excutils.save_and_reraise_exception(): [ 646.239865] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.239865] env[62204]: self.force_reraise() [ 646.239865] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.239865] env[62204]: raise self.value [ 646.239865] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 646.239865] env[62204]: updated_port = self._update_port( [ 646.239865] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.239865] env[62204]: _ensure_no_port_binding_failure(port) [ 646.239865] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.239865] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 646.240699] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6, please check neutron logs for more information. [ 646.240699] env[62204]: Removing descriptor: 14 [ 646.240699] env[62204]: ERROR nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6, please check neutron logs for more information. 
[ 646.240699] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] Traceback (most recent call last): [ 646.240699] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 646.240699] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] yield resources [ 646.240699] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 646.240699] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self.driver.spawn(context, instance, image_meta, [ 646.240699] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 646.240699] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self._vmops.spawn(context, instance, image_meta, injected_files, [ 646.240699] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 646.240699] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] vm_ref = self.build_virtual_machine(instance, [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] vif_infos = vmwarevif.get_vif_info(self._session, [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] for vif in network_info: [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] return self._sync_wrapper(fn, *args, **kwargs) [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self.wait() [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self[:] = self._gt.wait() [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] return self._exit_event.wait() [ 646.241057] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 646.241431] env[62204]: ERROR 
nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] result = hub.switch() [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] return self.greenlet.switch() [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] result = function(*args, **kwargs) [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] return func(*args, **kwargs) [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] raise e [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] nwinfo = self.network_api.allocate_for_instance( [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 646.241431] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] created_port_ids = self._update_ports_for_instance( [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] with excutils.save_and_reraise_exception(): [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self.force_reraise() [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] raise self.value [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] updated_port = self._update_port( [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 646.241791] 
env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] _ensure_no_port_binding_failure(port) [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 646.241791] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] raise exception.PortBindingFailed(port_id=port['id']) [ 646.242134] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] nova.exception.PortBindingFailed: Binding failed for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6, please check neutron logs for more information. [ 646.242134] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] [ 646.242134] env[62204]: INFO nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Terminating instance [ 646.243655] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Acquiring lock "refresh_cache-9e573093-6434-452d-8025-4688d9f78c53" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 646.243747] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Acquired lock "refresh_cache-9e573093-6434-452d-8025-4688d9f78c53" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 646.243886] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 646.501968] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f5030e-1b9d-4ba8-b481-7c842451b6c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.509787] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ec9eb8-31f2-4271-bd8a-f5deff37aca4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.547421] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24fae46-11eb-4351-a1e8-bb5da211fd90 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.555533] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb2d270-7a33-4cad-b5b9-19e97d3506ec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.574884] env[62204]: DEBUG nova.compute.provider_tree [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not 
changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.649883] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "137ce499-6602-46b5-b1eb-b03282c2bab3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.650136] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "137ce499-6602-46b5-b1eb-b03282c2bab3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.705077] env[62204]: DEBUG nova.network.neutron [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.764580] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 646.922472] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.078431] env[62204]: DEBUG nova.scheduler.client.report [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 647.207754] env[62204]: INFO nova.compute.manager [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 292b9c31-2ea1-4b28-8b60-79c6c80e1531] Took 1.03 seconds to deallocate network for instance.
[ 647.416250] env[62204]: DEBUG nova.compute.manager [req-a882cb1d-dec2-402c-98f3-21f38a701c0b req-c52e91ad-a6ef-4e13-b337-d975a69c44f6 service nova] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Received event network-changed-20f60ef7-c150-466f-a93f-3f1727cfb3a6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 647.416250] env[62204]: DEBUG nova.compute.manager [req-a882cb1d-dec2-402c-98f3-21f38a701c0b req-c52e91ad-a6ef-4e13-b337-d975a69c44f6 service nova] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Refreshing instance network info cache due to event network-changed-20f60ef7-c150-466f-a93f-3f1727cfb3a6. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 647.416712] env[62204]: DEBUG oslo_concurrency.lockutils [req-a882cb1d-dec2-402c-98f3-21f38a701c0b req-c52e91ad-a6ef-4e13-b337-d975a69c44f6 service nova] Acquiring lock "refresh_cache-9e573093-6434-452d-8025-4688d9f78c53" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.424659] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Releasing lock "refresh_cache-9e573093-6434-452d-8025-4688d9f78c53" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 647.425114] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 647.425494] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 647.425834] env[62204]: DEBUG oslo_concurrency.lockutils [req-a882cb1d-dec2-402c-98f3-21f38a701c0b req-c52e91ad-a6ef-4e13-b337-d975a69c44f6 service nova] Acquired lock "refresh_cache-9e573093-6434-452d-8025-4688d9f78c53" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.426030] env[62204]: DEBUG nova.network.neutron [req-a882cb1d-dec2-402c-98f3-21f38a701c0b req-c52e91ad-a6ef-4e13-b337-d975a69c44f6 service nova] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Refreshing network info cache for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 647.427046] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0eaae4cd-2387-40c1-8797-3277840c0bb6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.436941] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7eef4e-db4f-484a-b127-72a3cfdbc1f7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.459062] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb 
tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9e573093-6434-452d-8025-4688d9f78c53 could not be found. [ 647.459350] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 647.460163] env[62204]: INFO nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Took 0.03 seconds to destroy the instance on the hypervisor. [ 647.460163] env[62204]: DEBUG oslo.service.loopingcall [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 647.460163] env[62204]: DEBUG nova.compute.manager [-] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 647.460413] env[62204]: DEBUG nova.network.neutron [-] [instance: 9e573093-6434-452d-8025-4688d9f78c53] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 647.476146] env[62204]: DEBUG nova.network.neutron [-] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 647.584075] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.585449] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 647.588105] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.592s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.945841] env[62204]: DEBUG nova.network.neutron [req-a882cb1d-dec2-402c-98f3-21f38a701c0b req-c52e91ad-a6ef-4e13-b337-d975a69c44f6 service nova] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Instance cache missing network info.
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 647.978169] env[62204]: DEBUG nova.network.neutron [-] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.042200] env[62204]: DEBUG nova.network.neutron [req-a882cb1d-dec2-402c-98f3-21f38a701c0b req-c52e91ad-a6ef-4e13-b337-d975a69c44f6 service nova] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.093137] env[62204]: DEBUG nova.compute.utils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 648.097382] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 648.097554] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 648.145456] env[62204]: DEBUG nova.policy [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a2edea246e74173bbdb4365d0309cd7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be5f3f8b28ab4b63a2621b1fe1383af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 648.483570] env[62204]: INFO nova.compute.manager [-] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Took 1.02 seconds to deallocate network for instance. 
[ 648.486108] env[62204]: DEBUG nova.compute.claims [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 648.486108] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.547474] env[62204]: DEBUG oslo_concurrency.lockutils [req-a882cb1d-dec2-402c-98f3-21f38a701c0b req-c52e91ad-a6ef-4e13-b337-d975a69c44f6 service nova] Releasing lock "refresh_cache-9e573093-6434-452d-8025-4688d9f78c53" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.547991] env[62204]: DEBUG nova.compute.manager [req-a882cb1d-dec2-402c-98f3-21f38a701c0b req-c52e91ad-a6ef-4e13-b337-d975a69c44f6 service nova] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Received event network-vif-deleted-20f60ef7-c150-466f-a93f-3f1727cfb3a6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 648.548853] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f582fc30-9085-484d-b2c8-d685ec150e4d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.557502] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38b702c-a085-44aa-8aa7-1c807ce690dc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.591067] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129a4562-07d4-4780-af37-7bc6f47a4577 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.594345] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Successfully created port: f39354bc-f706-4aa8-859a-b97cae303bdd {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.598534] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 648.605153] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4aba57-cfdf-4dd4-a953-e32298d62ea5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.619907] env[62204]: DEBUG nova.compute.provider_tree [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.123207] env[62204]: DEBUG nova.scheduler.client.report [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 649.255957] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a52d51ae-5bde-499e-a415-89963d3712bd tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "292b9c31-2ea1-4b28-8b60-79c6c80e1531" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 108.644s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.256656] env[62204]: Traceback (most recent call last): [ 649.256740] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 649.256740] env[62204]: self.driver.spawn(context, instance, image_meta, [ 649.256740] env[62204]: File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 649.256740] env[62204]: self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.256740] env[62204]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 649.256740] env[62204]: vm_ref = self.build_virtual_machine(instance, [ 649.256740] env[62204]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 649.256740] env[62204]: vif_infos = vmwarevif.get_vif_info(self._session, [ 649.256740] env[62204]: File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 649.256740] env[62204]: for vif in network_info: [ 649.256740] env[62204]: File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 649.256740] env[62204]: return self._sync_wrapper(fn, *args, **kwargs) [ 649.256740] env[62204]: File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 649.256740] env[62204]: self.wait() [ 649.256740] env[62204]: File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 649.256740] env[62204]: self[:] = self._gt.wait() [ 649.256740] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 649.256740] env[62204]: return self._exit_event.wait() [ 
649.256740] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 649.256740] env[62204]: result = hub.switch() [ 649.256740] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 649.256740] env[62204]: return self.greenlet.switch() [ 649.256740] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.256740] env[62204]: result = function(*args, **kwargs) [ 649.256740] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.256740] env[62204]: return func(*args, **kwargs) [ 649.256740] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.256740] env[62204]: raise e [ 649.256740] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.256740] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 649.256740] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.256740] env[62204]: created_port_ids = self._update_ports_for_instance( [ 649.258037] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.258037] env[62204]: with excutils.save_and_reraise_exception(): [ 649.258037] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.258037] env[62204]: self.force_reraise() [ 649.258037] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.258037] env[62204]: raise self.value [ 649.258037] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.258037] env[62204]: updated_port = self._update_port( [ 649.258037] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.258037] env[62204]: _ensure_no_port_binding_failure(port) [ 649.258037] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.258037] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 649.258037] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. [ 649.258037] env[62204]: During handling of the above exception, another exception occurred: [ 649.258037] env[62204]: Traceback (most recent call last): [ 649.258037] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2456, in _do_build_and_run_instance [ 649.258037] env[62204]: self._build_and_run_instance(context, instance, image, [ 649.258037] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2748, in _build_and_run_instance [ 649.258037] env[62204]: raise exception.RescheduledException( [ 649.258037] env[62204]: nova.exception.RescheduledException: Build of instance 292b9c31-2ea1-4b28-8b60-79c6c80e1531 was re-scheduled: Binding failed for port 8706abc8-e1b9-4c2f-a8cc-b80ff3928dc9, please check neutron logs for more information. 
[ 649.258037] env[62204]: During handling of the above exception, another exception occurred: [ 649.258037] env[62204]: Traceback (most recent call last): [ 649.258037] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl [ 649.258037] env[62204]: func(*args, **kwargs) [ 649.258037] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.258037] env[62204]: return func(*args, **kwargs) [ 649.258037] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 649.258037] env[62204]: return f(*args, **kwargs) [ 649.259103] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2347, in _locked_do_build_and_run_instance [ 649.259103] env[62204]: result = self._do_build_and_run_instance(*args, **kwargs) [ 649.259103] env[62204]: File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 649.259103] env[62204]: with excutils.save_and_reraise_exception(): [ 649.259103] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.259103] env[62204]: self.force_reraise() [ 649.259103] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.259103] env[62204]: raise self.value [ 649.259103] env[62204]: File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 649.259103] env[62204]: return f(self, context, *args, **kw) [ 649.259103] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 649.259103] env[62204]: with excutils.save_and_reraise_exception(): [ 649.259103] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.259103] env[62204]: self.force_reraise() [ 649.259103] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.259103] env[62204]: raise self.value [ 649.259103] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 649.259103] env[62204]: return function(self, context, *args, **kwargs) [ 649.259103] env[62204]: File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 649.259103] env[62204]: return function(self, context, *args, **kwargs) [ 649.259103] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 649.259103] env[62204]: return function(self, context, *args, **kwargs) [ 649.259103] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2491, in _do_build_and_run_instance [ 649.259103] env[62204]: instance.save() [ 649.259103] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 209, in wrapper [ 649.259103] env[62204]: updates, result = self.indirection_api.object_action( [ 649.259103] env[62204]: File "/opt/stack/nova/nova/conductor/rpcapi.py", line 247, in object_action [ 649.259103] env[62204]: return cctxt.call(context, 'object_action', objinst=objinst, [ 649.259103] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/client.py", line 190, in call [ 649.259103] env[62204]: result = self.transport._send( [ 649.262344] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/transport.py", line 123, in _send [ 649.262344] env[62204]: return 
self._driver.send(target, ctxt, message, [ 649.262344] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 788, in send [ 649.262344] env[62204]: return self._send(target, ctxt, message, wait_for_reply, timeout, [ 649.262344] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 780, in _send [ 649.262344] env[62204]: raise result [ 649.262344] env[62204]: nova.exception_Remote.InstanceNotFound_Remote: Instance 292b9c31-2ea1-4b28-8b60-79c6c80e1531 could not be found. [ 649.262344] env[62204]: Traceback (most recent call last): [ 649.262344] env[62204]: File "/opt/stack/nova/nova/conductor/manager.py", line 142, in _object_dispatch [ 649.262344] env[62204]: return getattr(target, method)(*args, **kwargs) [ 649.262344] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 226, in wrapper [ 649.262344] env[62204]: return fn(self, *args, **kwargs) [ 649.262344] env[62204]: File "/opt/stack/nova/nova/objects/instance.py", line 878, in save [ 649.262344] env[62204]: old_ref, inst_ref = db.instance_update_and_get_original( [ 649.262344] env[62204]: File "/opt/stack/nova/nova/db/utils.py", line 35, in wrapper [ 649.262344] env[62204]: return f(*args, **kwargs) [ 649.262344] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 144, in wrapper [ 649.262344] env[62204]: with excutils.save_and_reraise_exception() as ectxt: [ 649.262344] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.262344] env[62204]: self.force_reraise() [ 649.262344] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.262344] env[62204]: raise self.value [ 649.262344] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 142, in wrapper [ 649.262344] env[62204]: return f(*args, **kwargs) [ 649.262344] env[62204]: File "/opt/stack/nova/nova/db/main/api.py", line 207, in wrapper [ 649.262344] env[62204]: return f(context, *args, **kwargs) [ 649.262344] env[62204]: File "/opt/stack/nova/nova/db/main/api.py", line 2283, in instance_update_and_get_original [ 649.262344] env[62204]: instance_ref = _instance_get_by_uuid(context, instance_uuid, [ 649.262344] env[62204]: File "/opt/stack/nova/nova/db/main/api.py", line 1405, in _instance_get_by_uuid [ 649.263115] env[62204]: raise exception.InstanceNotFound(instance_id=uuid) [ 649.263115] env[62204]: nova.exception.InstanceNotFound: Instance 292b9c31-2ea1-4b28-8b60-79c6c80e1531 could not be found. [ 649.613641] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 649.633209] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.045s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.634009] env[62204]: ERROR nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f31d329a-00eb-4234-8c40-0e7991448c15, please check neutron logs for more information. [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Traceback (most recent call last): [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self.driver.spawn(context, instance, image_meta, [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] vm_ref = self.build_virtual_machine(instance, [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] vif_infos = vmwarevif.get_vif_info(self._session, [ 649.634009] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] for vif in network_info: [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] return self._sync_wrapper(fn, *args, **kwargs) [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self.wait() [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 649.634394] env[62204]: ERROR 
nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self[:] = self._gt.wait() [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] return self._exit_event.wait() [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] result = hub.switch() [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 649.634394] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] return self.greenlet.switch() [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] result = function(*args, **kwargs) [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] return func(*args, **kwargs) [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] raise e [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] nwinfo = self.network_api.allocate_for_instance( [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] created_port_ids = self._update_ports_for_instance( [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] with excutils.save_and_reraise_exception(): [ 649.634837] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] self.force_reraise() [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] raise self.value [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] updated_port = self._update_port( [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] _ensure_no_port_binding_failure(port) [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] raise exception.PortBindingFailed(port_id=port['id']) [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] nova.exception.PortBindingFailed: Binding failed for port f31d329a-00eb-4234-8c40-0e7991448c15, please check neutron logs for more information. [ 649.635203] env[62204]: ERROR nova.compute.manager [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] [ 649.635477] env[62204]: DEBUG nova.compute.utils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Binding failed for port f31d329a-00eb-4234-8c40-0e7991448c15, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 649.638489] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.029s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.641951] env[62204]: INFO nova.compute.claims [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.644714] env[62204]: DEBUG nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Build of instance 25e4a40e-c5a6-47f5-9e53-00d3073252fc was re-scheduled: Binding failed for port f31d329a-00eb-4234-8c40-0e7991448c15, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 649.645267] env[62204]: DEBUG nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 649.645505] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Acquiring lock "refresh_cache-25e4a40e-c5a6-47f5-9e53-00d3073252fc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.645651] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Acquired lock "refresh_cache-25e4a40e-c5a6-47f5-9e53-00d3073252fc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.645805] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 649.649008] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=<?>,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-08T23:34:16Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 649.649857] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 649.650058] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.650407] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints
/opt/stack/nova/nova/virt/hardware.py:388}} [ 649.650629] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.650800] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 649.651012] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 649.651178] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 649.651338] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 649.651493] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 649.651664] env[62204]: DEBUG nova.virt.hardware [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.653928] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb76d9e-f15c-44bc-b88e-9a39b1a5b31d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.663078] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793f42eb-e397-424d-bacb-0f6d2f002801 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.759472] env[62204]: DEBUG nova.compute.manager [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 650.126948] env[62204]: DEBUG nova.compute.manager [req-87dbbb56-a22d-43be-a0f5-ae0107ce3489 req-d14c5159-1856-4f30-ab05-c9473936b5ba service nova] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Received event network-changed-f39354bc-f706-4aa8-859a-b97cae303bdd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 650.127133] env[62204]: DEBUG nova.compute.manager [req-87dbbb56-a22d-43be-a0f5-ae0107ce3489 req-d14c5159-1856-4f30-ab05-c9473936b5ba service nova] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Refreshing instance network info cache due to event network-changed-f39354bc-f706-4aa8-859a-b97cae303bdd. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 650.127352] env[62204]: DEBUG oslo_concurrency.lockutils [req-87dbbb56-a22d-43be-a0f5-ae0107ce3489 req-d14c5159-1856-4f30-ab05-c9473936b5ba service nova] Acquiring lock "refresh_cache-63ed8992-0e8f-41ca-8b28-c0b2538ff61c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.127529] env[62204]: DEBUG oslo_concurrency.lockutils [req-87dbbb56-a22d-43be-a0f5-ae0107ce3489 req-d14c5159-1856-4f30-ab05-c9473936b5ba service nova] Acquired lock "refresh_cache-63ed8992-0e8f-41ca-8b28-c0b2538ff61c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.127632] env[62204]: DEBUG nova.network.neutron [req-87dbbb56-a22d-43be-a0f5-ae0107ce3489 req-d14c5159-1856-4f30-ab05-c9473936b5ba service nova] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Refreshing network info cache for port f39354bc-f706-4aa8-859a-b97cae303bdd {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 650.180144] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.277080] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.289113] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.354769] env[62204]: ERROR nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f39354bc-f706-4aa8-859a-b97cae303bdd, please check neutron logs for more information. 
[ 650.354769] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 650.354769] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.354769] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 650.354769] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 650.354769] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 650.354769] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 650.354769] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 650.354769] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.354769] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 650.354769] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.354769] env[62204]: ERROR nova.compute.manager raise self.value [ 650.354769] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 650.354769] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 650.354769] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.354769] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 650.355207] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.355207] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 650.355207] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f39354bc-f706-4aa8-859a-b97cae303bdd, please check neutron logs for more information. 
[ 650.355207] env[62204]: ERROR nova.compute.manager [ 650.355207] env[62204]: Traceback (most recent call last): [ 650.355207] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 650.355207] env[62204]: listener.cb(fileno) [ 650.355207] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.355207] env[62204]: result = function(*args, **kwargs) [ 650.355207] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 650.355207] env[62204]: return func(*args, **kwargs) [ 650.355207] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.355207] env[62204]: raise e [ 650.355207] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.355207] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 650.355207] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 650.355207] env[62204]: created_port_ids = self._update_ports_for_instance( [ 650.355207] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 650.355207] env[62204]: with excutils.save_and_reraise_exception(): [ 650.355207] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.355207] env[62204]: self.force_reraise() [ 650.355207] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.355207] env[62204]: raise self.value [ 650.355207] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 650.355207] env[62204]: updated_port = self._update_port( [ 650.355207] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.355207] env[62204]: _ensure_no_port_binding_failure(port) [ 650.355207] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.355207] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 650.356124] env[62204]: nova.exception.PortBindingFailed: Binding failed for port f39354bc-f706-4aa8-859a-b97cae303bdd, please check neutron logs for more information. [ 650.356124] env[62204]: Removing descriptor: 14 [ 650.356124] env[62204]: ERROR nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f39354bc-f706-4aa8-859a-b97cae303bdd, please check neutron logs for more information. 
[ 650.356124] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Traceback (most recent call last): [ 650.356124] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 650.356124] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] yield resources [ 650.356124] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 650.356124] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self.driver.spawn(context, instance, image_meta, [ 650.356124] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 650.356124] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 650.356124] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 650.356124] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] vm_ref = self.build_virtual_machine(instance, [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] vif_infos = vmwarevif.get_vif_info(self._session, [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] for vif in network_info: [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] return self._sync_wrapper(fn, *args, **kwargs) [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self.wait() [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self[:] = self._gt.wait() [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] return self._exit_event.wait() [ 650.356444] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 650.356756] env[62204]: ERROR 
nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] result = hub.switch() [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] return self.greenlet.switch() [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] result = function(*args, **kwargs) [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] return func(*args, **kwargs) [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] raise e [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] nwinfo = self.network_api.allocate_for_instance( [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 650.356756] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] created_port_ids = self._update_ports_for_instance( [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] with excutils.save_and_reraise_exception(): [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self.force_reraise() [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] raise self.value [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] updated_port = self._update_port( [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 650.357092] 
env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] _ensure_no_port_binding_failure(port) [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 650.357092] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] raise exception.PortBindingFailed(port_id=port['id']) [ 650.357435] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] nova.exception.PortBindingFailed: Binding failed for port f39354bc-f706-4aa8-859a-b97cae303bdd, please check neutron logs for more information. [ 650.357435] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] [ 650.357435] env[62204]: INFO nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Terminating instance [ 650.361512] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-63ed8992-0e8f-41ca-8b28-c0b2538ff61c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.620412] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquiring lock "d6370e37-6f73-4334-8057-a30aa2c39682" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.620733] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "d6370e37-6f73-4334-8057-a30aa2c39682" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.649939] env[62204]: DEBUG nova.network.neutron [req-87dbbb56-a22d-43be-a0f5-ae0107ce3489 req-d14c5159-1856-4f30-ab05-c9473936b5ba service nova] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.733883] env[62204]: DEBUG nova.network.neutron [req-87dbbb56-a22d-43be-a0f5-ae0107ce3489 req-d14c5159-1856-4f30-ab05-c9473936b5ba service nova] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.779902] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Releasing lock "refresh_cache-25e4a40e-c5a6-47f5-9e53-00d3073252fc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.780192] env[62204]: DEBUG nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 650.780307] env[62204]: DEBUG nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 650.781316] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.798704] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.081723] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175c5486-4252-4be3-8199-e835734ee8fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.091311] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6767f5ec-9ba2-493a-ae1e-995bca676f03 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.121961] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22355f9f-ee1d-4a3c-a090-35502dcdaf33 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.129432] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2270fc46-be26-4cc3-8a8b-3af95044f935 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.145384] env[62204]: DEBUG nova.compute.provider_tree [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.238023] env[62204]: DEBUG oslo_concurrency.lockutils [req-87dbbb56-a22d-43be-a0f5-ae0107ce3489 req-d14c5159-1856-4f30-ab05-c9473936b5ba service nova] Releasing lock "refresh_cache-63ed8992-0e8f-41ca-8b28-c0b2538ff61c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.238432] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-63ed8992-0e8f-41ca-8b28-c0b2538ff61c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.238619] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 651.304856] env[62204]: DEBUG nova.network.neutron [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.653268] env[62204]: DEBUG nova.scheduler.client.report [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.756537] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.807614] env[62204]: INFO nova.compute.manager [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] [instance: 25e4a40e-c5a6-47f5-9e53-00d3073252fc] Took 1.03 seconds to deallocate network for instance. [ 651.850047] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.158723] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.520s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.159261] env[62204]: DEBUG nova.compute.manager [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 652.161746] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.527s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.166701] env[62204]: DEBUG nova.compute.manager [req-242e8ec8-e667-4a96-8943-30a3ea279c3a req-9b5ed7b6-fcba-4f53-815c-fd058f4895a0 service nova] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Received event network-vif-deleted-f39354bc-f706-4aa8-859a-b97cae303bdd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 652.353758] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-63ed8992-0e8f-41ca-8b28-c0b2538ff61c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.353758] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 652.353758] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 652.354289] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05364d75-ac85-4e45-baa9-d229364f61ef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.363307] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf60f4f-b5f1-4522-bd0d-44a02938899f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.385178] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63ed8992-0e8f-41ca-8b28-c0b2538ff61c could not be found. [ 652.385437] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.385653] env[62204]: INFO nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 652.385910] env[62204]: DEBUG oslo.service.loopingcall [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 652.386151] env[62204]: DEBUG nova.compute.manager [-] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 652.386316] env[62204]: DEBUG nova.network.neutron [-] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 652.409259] env[62204]: DEBUG nova.network.neutron [-] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.665064] env[62204]: DEBUG nova.compute.utils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 652.669472] env[62204]: DEBUG nova.compute.manager [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Not allocating networking since 'none' was specified. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 652.839542] env[62204]: INFO nova.scheduler.client.report [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Deleted allocations for instance 25e4a40e-c5a6-47f5-9e53-00d3073252fc [ 652.912431] env[62204]: DEBUG nova.network.neutron [-] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.170919] env[62204]: DEBUG nova.compute.manager [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 653.195545] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance f5a11b74-e1fe-44c0-bccb-5817bc582608 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 653.195720] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 5e264b99-8025-471a-bc6b-238f1bca054c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 653.195843] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 9e573093-6434-452d-8025-4688d9f78c53 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 653.195964] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 63ed8992-0e8f-41ca-8b28-c0b2538ff61c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 653.196090] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 7c21539c-35fa-4f58-beb0-e965ffaf79af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 653.348025] env[62204]: DEBUG oslo_concurrency.lockutils [None req-771486f2-e017-40a0-87c4-81a5328140a8 tempest-ImagesOneServerTestJSON-2007540257 tempest-ImagesOneServerTestJSON-2007540257-project-member] Lock "25e4a40e-c5a6-47f5-9e53-00d3073252fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 111.192s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.415499] env[62204]: INFO nova.compute.manager [-] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Took 1.03 seconds to deallocate network for instance. [ 653.418023] env[62204]: DEBUG nova.compute.claims [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 653.418298] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 653.700241] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 39d5f95c-7c98-4263-a46e-948d2e3d31ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 653.850240] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 654.184083] env[62204]: DEBUG nova.compute.manager [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 654.203045] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.210497] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 654.210735] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 654.210890] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.211078] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 654.211223] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.211365] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 654.211567] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 654.211712] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 654.211956] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f 
tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 654.212038] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 654.212201] env[62204]: DEBUG nova.virt.hardware [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 654.213076] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb511459-bea9-4e1e-a7a4-8329643a6264 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.223125] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c2dbda-7e92-4dbf-bc49-e1dbe8654ccc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.242020] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 654.245519] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Creating folder: Project (80bc4a089aa049eb858f4845aa8d69d3). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 654.245791] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d38989ad-eea0-414f-80fb-514a5cd819cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.257819] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Created folder: Project (80bc4a089aa049eb858f4845aa8d69d3) in parent group-v259933. [ 654.258040] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Creating folder: Instances. Parent ref: group-v259950. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 654.258294] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab955254-ebdd-4237-aba2-8bc2694774e4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.269041] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Created folder: Instances in parent group-v259950. 
[ 654.269041] env[62204]: DEBUG oslo.service.loopingcall [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 654.269041] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 654.269041] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c8ac18a-2968-4882-bc9b-3b3b9581d6a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.286509] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 654.286509] env[62204]: value = "task-1199422" [ 654.286509] env[62204]: _type = "Task" [ 654.286509] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.299094] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199422, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.373338] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.708572] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 7cccaaf2-f17d-426d-9340-e33260235706 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 654.803372] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199422, 'name': CreateVM_Task, 'duration_secs': 0.264201} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.803556] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 654.804292] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.804292] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.804498] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 654.804755] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9299454-cad7-4c50-a722-7e204ba4dd27 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.809987] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 654.809987] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c7a23f-3124-cf76-5bbb-0f7ced8090b4" [ 654.809987] env[62204]: _type = "Task" [ 654.809987] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.817153] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c7a23f-3124-cf76-5bbb-0f7ced8090b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.211724] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance ce74983e-8347-425c-967a-6a78a7daa701 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.321200] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c7a23f-3124-cf76-5bbb-0f7ced8090b4, 'name': SearchDatastore_Task, 'duration_secs': 0.011013} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.321503] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 655.321727] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 655.321961] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.322128] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.322306] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 655.322556] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-346e77ce-d3f2-41ef-949e-d8ed4cb343a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.330680] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 655.330883] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 655.331602] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dde674cf-536f-436d-bea4-8d2a1b0a04a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.336960] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 655.336960] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521ebf34-b046-0d0d-459d-2243b97c1a7d" [ 655.336960] env[62204]: _type = "Task" [ 655.336960] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.344331] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521ebf34-b046-0d0d-459d-2243b97c1a7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.715731] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 0ab619ea-755b-4d71-9c12-0eeda0b42a39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 655.848636] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521ebf34-b046-0d0d-459d-2243b97c1a7d, 'name': SearchDatastore_Task, 'duration_secs': 0.008358} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.849632] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4aa0637d-a4eb-4c4c-9eb9-f70a9f0530de {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.854590] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 655.854590] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52807b8c-ef3a-edf3-ed30-83362c419d3a" [ 655.854590] env[62204]: _type = "Task" [ 655.854590] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.862902] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52807b8c-ef3a-edf3-ed30-83362c419d3a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.218466] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance bcb11a72-4394-42a2-9a9f-295adc1abcd0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.365944] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52807b8c-ef3a-edf3-ed30-83362c419d3a, 'name': SearchDatastore_Task, 'duration_secs': 0.009796} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.366242] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.366560] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 7c21539c-35fa-4f58-beb0-e965ffaf79af/7c21539c-35fa-4f58-beb0-e965ffaf79af.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 656.366821] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab0560c6-10a3-4442-a509-a9342f5e1478 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.374365] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 656.374365] env[62204]: value = "task-1199423" [ 656.374365] env[62204]: _type = "Task" [ 656.374365] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.381404] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199423, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.724445] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 6665383b-f5fd-4fdf-b625-86cfb0869419 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 656.884994] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199423, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470763} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.885330] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 7c21539c-35fa-4f58-beb0-e965ffaf79af/7c21539c-35fa-4f58-beb0-e965ffaf79af.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.885615] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.885864] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e629662-378c-4662-bb6e-facf72b08971 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.891982] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 656.891982] env[62204]: value = "task-1199424" [ 656.891982] env[62204]: _type = "Task" [ 656.891982] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.900191] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.227896] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 432115aa-8999-40fe-a0cb-31433575c912 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.406027] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072127} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.406027] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 657.406027] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f3444e-19d3-4782-b588-f9e2637f2676 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.422487] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 7c21539c-35fa-4f58-beb0-e965ffaf79af/7c21539c-35fa-4f58-beb0-e965ffaf79af.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 657.422750] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-916a4c6d-2bad-4e00-8dd9-de83838fd4ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.442446] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 657.442446] env[62204]: value = "task-1199425" [ 657.442446] env[62204]: _type = "Task" [ 657.442446] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.452846] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199425, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.733886] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 48fe8f43-4ab9-41de-9b81-35b4438585ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 657.951968] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199425, 'name': ReconfigVM_Task, 'duration_secs': 0.290081} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.952263] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 7c21539c-35fa-4f58-beb0-e965ffaf79af/7c21539c-35fa-4f58-beb0-e965ffaf79af.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.952835] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-854bd0f4-5b56-447e-9c69-c88308ca5f0b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.959429] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 657.959429] env[62204]: value = "task-1199426" [ 657.959429] env[62204]: _type = "Task" [ 657.959429] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.968053] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199426, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.237709] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 0a4a432d-a71a-4da7-be90-25dcec5a64c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 658.469378] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199426, 'name': Rename_Task, 'duration_secs': 0.134621} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.469641] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 658.469880] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9bb66fa9-4dbe-4235-a8e8-aa4653ed3b79 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.476590] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 658.476590] env[62204]: value = "task-1199427" [ 658.476590] env[62204]: _type = "Task" [ 658.476590] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.483955] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199427, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.740925] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 19326d9f-5f3a-4756-874f-d4d3ce25f8e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 658.987815] env[62204]: DEBUG oslo_vmware.api [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199427, 'name': PowerOnVM_Task, 'duration_secs': 0.426346} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.987815] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.987815] env[62204]: INFO nova.compute.manager [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Took 4.80 seconds to spawn the instance on the hypervisor. [ 658.987815] env[62204]: DEBUG nova.compute.manager [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 658.988963] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0501bacd-1aaa-4f42-9439-47523fe0a175 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.245748] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance b0180c2b-8edf-4d15-8d12-c754b73f6030 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.486431] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "c0990e53-70c9-4536-b26a-bc00bd457c56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.486695] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.505504] env[62204]: INFO nova.compute.manager [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Took 25.92 seconds to build instance. [ 659.748371] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 12656a79-a836-452c-8f94-c8e142c9ec2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.008177] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f21ef03b-02f3-4c23-8324-04c0e149814f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "7c21539c-35fa-4f58-beb0-e965ffaf79af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.674s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.252041] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 258ec37d-c791-4c43-8725-0f4b4bbf9b5b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 660.515232] env[62204]: DEBUG nova.compute.manager [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 660.756137] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 186a2de8-2b9e-4c84-8502-cb0ed3b43123 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.043580] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.258942] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance a71fd192-f3b6-4f0f-900d-887d15f44d7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 661.761992] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 69604167-6a61-4723-bf7d-7ba168837839 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.265206] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 51c9e353-f2cf-41b4-b37e-1cfd5dca0518 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 662.769946] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.271837] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 2727dc46-98ed-435d-89ef-41bc20cda776 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 663.774576] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 55d1649c-5eff-4264-bce1-dd907f9531f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.280694] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 664.785019] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 703bf0c4-9bff-4967-8e84-09969b32b5a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.288557] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 665.791473] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 137ce499-6602-46b5-b1eb-b03282c2bab3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.295339] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance d6370e37-6f73-4334-8057-a30aa2c39682 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 666.295634] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 666.295782] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 666.662975] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c008c3-6be4-4617-9670-c4d6b2031681 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.671185] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ce6fd4-a6e7-4c6e-827f-5645264cbd99 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.700551] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b39dd32-7b40-4bf8-b7b6-d22e95022156 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.707011] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae14f0f0-cf27-4296-a09e-384c161af015 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.719467] env[62204]: DEBUG nova.compute.provider_tree [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.223074] env[62204]: DEBUG nova.scheduler.client.report [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 667.728255] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 667.728590] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 15.567s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.728865] env[62204]: DEBUG oslo_concurrency.lockutils [None 
req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.344s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.730484] env[62204]: INFO nova.compute.claims [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 669.050527] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28b504d-2272-4b5b-b28c-72a66fff24c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.057871] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf564214-b04e-479a-b8da-8f5beadbee54 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.090047] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfae4e69-0ff8-489b-8a12-ffe3d6b3499f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.096804] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59fb744-6365-4cf0-8471-1c49d99d05d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.109507] env[62204]: DEBUG nova.compute.provider_tree [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.613314] env[62204]: DEBUG nova.scheduler.client.report [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.118603] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.119173] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 670.122786] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.374s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.627401] env[62204]: DEBUG nova.compute.utils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 670.632115] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 670.632337] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 670.691370] env[62204]: DEBUG nova.policy [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ccc09ece42a34b2294dfe53d4fd4e466', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '043f76e681b84a2f8ad7836db8aafef0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 671.015011] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a487200e-93d4-478f-82f2-b108a9e88169 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.023043] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b0b663-18b5-4f10-b631-d97684fe09ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.053351] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Successfully created port: c81f162b-24a7-413f-b08a-8ba2e933db30 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 671.055650] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587f4104-a97c-4fb8-b285-034d1f215de0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.062547] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-480e5056-0713-4ffc-939f-8b58ac12c47a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.075755] env[62204]: DEBUG nova.compute.provider_tree [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.133126] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 671.583047] env[62204]: DEBUG nova.scheduler.client.report [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.080967] env[62204]: DEBUG nova.compute.manager [req-152b5805-b7d7-486e-a39b-592c85f43dc5 req-e6dfc5ff-a85e-46c2-b5de-727ee135b177 service nova] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Received event network-changed-c81f162b-24a7-413f-b08a-8ba2e933db30 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 672.081153] env[62204]: DEBUG nova.compute.manager [req-152b5805-b7d7-486e-a39b-592c85f43dc5 req-e6dfc5ff-a85e-46c2-b5de-727ee135b177 service nova] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Refreshing instance network info cache due to event network-changed-c81f162b-24a7-413f-b08a-8ba2e933db30. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 672.081380] env[62204]: DEBUG oslo_concurrency.lockutils [req-152b5805-b7d7-486e-a39b-592c85f43dc5 req-e6dfc5ff-a85e-46c2-b5de-727ee135b177 service nova] Acquiring lock "refresh_cache-39d5f95c-7c98-4263-a46e-948d2e3d31ce" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.081518] env[62204]: DEBUG oslo_concurrency.lockutils [req-152b5805-b7d7-486e-a39b-592c85f43dc5 req-e6dfc5ff-a85e-46c2-b5de-727ee135b177 service nova] Acquired lock "refresh_cache-39d5f95c-7c98-4263-a46e-948d2e3d31ce" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.081673] env[62204]: DEBUG nova.network.neutron [req-152b5805-b7d7-486e-a39b-592c85f43dc5 req-e6dfc5ff-a85e-46c2-b5de-727ee135b177 service nova] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Refreshing network info cache for port c81f162b-24a7-413f-b08a-8ba2e933db30 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 672.086861] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.964s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.087474] env[62204]: ERROR nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fcabcbf5-1604-4373-9e22-9cbaff73ef96, please check neutron logs for more information. 
[ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Traceback (most recent call last): [ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self.driver.spawn(context, instance, image_meta, [ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] vm_ref = self.build_virtual_machine(instance, [ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.087474] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] for vif in network_info: [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] return self._sync_wrapper(fn, *args, **kwargs) [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self.wait() [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self[:] = self._gt.wait() [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] return self._exit_event.wait() [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] result = hub.switch() [ 672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
672.087832] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] return self.greenlet.switch() [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] result = function(*args, **kwargs) [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] return func(*args, **kwargs) [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] raise e [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] nwinfo = self.network_api.allocate_for_instance( [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] created_port_ids = self._update_ports_for_instance( [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] with excutils.save_and_reraise_exception(): [ 672.088229] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] self.force_reraise() [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] raise self.value [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] updated_port = self._update_port( [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] _ensure_no_port_binding_failure(port) [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] raise exception.PortBindingFailed(port_id=port['id']) [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] nova.exception.PortBindingFailed: Binding failed for port fcabcbf5-1604-4373-9e22-9cbaff73ef96, please check neutron logs for more information. [ 672.088618] env[62204]: ERROR nova.compute.manager [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] [ 672.088947] env[62204]: DEBUG nova.compute.utils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Binding failed for port fcabcbf5-1604-4373-9e22-9cbaff73ef96, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 672.089197] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.702s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.090585] env[62204]: INFO nova.compute.claims [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 672.095032] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Build of instance f5a11b74-e1fe-44c0-bccb-5817bc582608 was re-scheduled: Binding failed for port fcabcbf5-1604-4373-9e22-9cbaff73ef96, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 672.095467] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 672.095677] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "refresh_cache-f5a11b74-e1fe-44c0-bccb-5817bc582608" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.095819] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquired lock "refresh_cache-f5a11b74-e1fe-44c0-bccb-5817bc582608" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.095971] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 672.145166] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 672.173848] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 672.174227] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 672.174798] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.175125] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 672.175516] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.175882] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 672.176388] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 672.176649] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 672.176906] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] 
Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 672.177193] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 672.177476] env[62204]: DEBUG nova.virt.hardware [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 672.178725] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655769d7-9dd9-44c9-bcb6-81c804bcf422 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.190695] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af6f61f-1485-435f-b412-9d250d0c59f2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.276989] env[62204]: ERROR nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c81f162b-24a7-413f-b08a-8ba2e933db30, please check neutron logs for more information. [ 672.276989] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 672.276989] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.276989] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 672.276989] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.276989] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 672.276989] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.276989] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 672.276989] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.276989] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 672.276989] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.276989] env[62204]: ERROR nova.compute.manager raise self.value [ 672.276989] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.276989] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 672.276989] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.276989] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 672.277443] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 672.277443] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 672.277443] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c81f162b-24a7-413f-b08a-8ba2e933db30, please check neutron logs for more information. [ 672.277443] env[62204]: ERROR nova.compute.manager [ 672.277443] env[62204]: Traceback (most recent call last): [ 672.277443] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 672.277443] env[62204]: listener.cb(fileno) [ 672.277443] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.277443] env[62204]: result = function(*args, **kwargs) [ 672.277443] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.277443] env[62204]: return func(*args, **kwargs) [ 672.277443] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.277443] env[62204]: raise e [ 672.277443] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.277443] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 672.277443] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.277443] env[62204]: created_port_ids = self._update_ports_for_instance( [ 672.277443] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.277443] env[62204]: with excutils.save_and_reraise_exception(): [ 672.277443] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.277443] env[62204]: self.force_reraise() [ 672.277443] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.277443] env[62204]: raise self.value [ 672.277443] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.277443] env[62204]: updated_port = self._update_port( [ 672.277443] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.277443] env[62204]: _ensure_no_port_binding_failure(port) [ 672.277443] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.277443] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 672.278250] env[62204]: nova.exception.PortBindingFailed: Binding failed for port c81f162b-24a7-413f-b08a-8ba2e933db30, please check neutron logs for more information. [ 672.278250] env[62204]: Removing descriptor: 14 [ 672.278250] env[62204]: ERROR nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c81f162b-24a7-413f-b08a-8ba2e933db30, please check neutron logs for more information. 
[ 672.278250] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Traceback (most recent call last): [ 672.278250] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 672.278250] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] yield resources [ 672.278250] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.278250] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self.driver.spawn(context, instance, image_meta, [ 672.278250] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 672.278250] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.278250] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.278250] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] vm_ref = self.build_virtual_machine(instance, [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] for vif in network_info: [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] return self._sync_wrapper(fn, *args, **kwargs) [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self.wait() [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self[:] = self._gt.wait() [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] return self._exit_event.wait() [ 672.278572] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 672.278910] env[62204]: ERROR 
nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] result = hub.switch() [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] return self.greenlet.switch() [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] result = function(*args, **kwargs) [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] return func(*args, **kwargs) [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] raise e [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] nwinfo = self.network_api.allocate_for_instance( [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.278910] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] created_port_ids = self._update_ports_for_instance( [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] with excutils.save_and_reraise_exception(): [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self.force_reraise() [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] raise self.value [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] updated_port = self._update_port( [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.279287] 
env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] _ensure_no_port_binding_failure(port) [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.279287] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] raise exception.PortBindingFailed(port_id=port['id']) [ 672.279599] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] nova.exception.PortBindingFailed: Binding failed for port c81f162b-24a7-413f-b08a-8ba2e933db30, please check neutron logs for more information. [ 672.279599] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] [ 672.279599] env[62204]: INFO nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Terminating instance [ 672.280690] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Acquiring lock "refresh_cache-39d5f95c-7c98-4263-a46e-948d2e3d31ce" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.600573] env[62204]: DEBUG nova.network.neutron [req-152b5805-b7d7-486e-a39b-592c85f43dc5 req-e6dfc5ff-a85e-46c2-b5de-727ee135b177 service nova] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 672.616257] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 672.688082] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.713486] env[62204]: DEBUG nova.network.neutron [req-152b5805-b7d7-486e-a39b-592c85f43dc5 req-e6dfc5ff-a85e-46c2-b5de-727ee135b177 service nova] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.190975] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Releasing lock "refresh_cache-f5a11b74-e1fe-44c0-bccb-5817bc582608" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.191266] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 673.191447] env[62204]: DEBUG nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 673.191610] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 673.215623] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.217574] env[62204]: DEBUG oslo_concurrency.lockutils [req-152b5805-b7d7-486e-a39b-592c85f43dc5 req-e6dfc5ff-a85e-46c2-b5de-727ee135b177 service nova] Releasing lock "refresh_cache-39d5f95c-7c98-4263-a46e-948d2e3d31ce" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.222016] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Acquired lock "refresh_cache-39d5f95c-7c98-4263-a46e-948d2e3d31ce" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.222016] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 673.539332] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9434cc7d-13b8-4f09-9511-431b3f50e846 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.547555] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70709116-7372-4790-ace2-9154075643c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.578338] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e8aadf-ef18-49c2-bebb-edaab71a3248 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.585507] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11541504-dde6-4323-9804-a70fc221efbd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.599651] env[62204]: DEBUG nova.compute.provider_tree [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.718485] env[62204]: DEBUG nova.network.neutron [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.742691] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.843286] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.103061] env[62204]: DEBUG nova.scheduler.client.report [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.108034] env[62204]: DEBUG nova.compute.manager [req-4b109956-32ac-45f1-ba0a-bf56432f8158 req-806ecd47-8e26-468c-afdd-999726e4064a service nova] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Received event network-vif-deleted-c81f162b-24a7-413f-b08a-8ba2e933db30 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 674.220968] env[62204]: INFO nova.compute.manager [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: f5a11b74-e1fe-44c0-bccb-5817bc582608] Took 1.03 seconds to deallocate network for instance. [ 674.346167] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Releasing lock "refresh_cache-39d5f95c-7c98-4263-a46e-948d2e3d31ce" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.346496] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 674.346738] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 674.347099] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-565a6e6b-6be8-4b18-a2e5-13935d2e276e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.356074] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc87bebf-c751-494f-a719-fd678d883dda {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.377803] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 39d5f95c-7c98-4263-a46e-948d2e3d31ce could not be found. [ 674.378038] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 674.378227] env[62204]: INFO nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Took 0.03 seconds to destroy the instance on the hypervisor. [ 674.378464] env[62204]: DEBUG oslo.service.loopingcall [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.378683] env[62204]: DEBUG nova.compute.manager [-] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 674.378780] env[62204]: DEBUG nova.network.neutron [-] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 674.391933] env[62204]: DEBUG nova.network.neutron [-] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.610425] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.521s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.611085] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 674.613759] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.109s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.615652] env[62204]: INFO nova.compute.claims [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.894408] env[62204]: DEBUG nova.network.neutron [-] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.120840] env[62204]: DEBUG nova.compute.utils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 675.124377] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 675.124377] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 675.163786] env[62204]: DEBUG nova.policy [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79aeed737e6047d2a47cbf4bb30ce7c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23e817b70dcf49aebe36a4053ed8993d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 675.248987] env[62204]: INFO nova.scheduler.client.report [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Deleted allocations for instance f5a11b74-e1fe-44c0-bccb-5817bc582608 [ 675.397551] env[62204]: INFO nova.compute.manager [-] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Took 1.02 seconds to deallocate network for instance. [ 675.400374] env[62204]: DEBUG nova.compute.claims [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 675.400548] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.495398] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Successfully created port: 071a4dbb-37dd-44af-a177-5dd45b46109a {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 675.624851] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 675.758433] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7c398ed-2cf5-4d45-adfb-39469112f224 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "f5a11b74-e1fe-44c0-bccb-5817bc582608" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 131.006s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.017465] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f0134f-2c45-4bd9-ba0a-3ed32e12ac40 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.025198] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93faa10c-546b-4eae-84ca-21160354cb31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.054850] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc9e656-7092-45e6-b8d0-c897eca81b4d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.064855] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869419cb-b908-48dd-bdd9-bf666f060270 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.075929] env[62204]: DEBUG nova.compute.provider_tree [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.261894] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 676.581029] env[62204]: DEBUG nova.scheduler.client.report [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.637776] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 676.661920] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 676.662409] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 676.662738] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 676.663064] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 676.663360] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 676.663638] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 676.663976] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 676.664300] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 676.664625] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 676.665019] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 676.665348] env[62204]: DEBUG nova.virt.hardware [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 676.668127] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562ee188-7728-40ba-9522-30e634cc3f0d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.676413] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039acb81-17c0-4058-9c2f-f58741f85c2d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.788801] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.864440] env[62204]: DEBUG nova.compute.manager [req-da2ff1df-22e1-425a-b068-26eee5184f40 req-d94c5317-cdfa-4468-842a-e63ef231b0a6 service nova] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Received event network-changed-071a4dbb-37dd-44af-a177-5dd45b46109a {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 676.864758] env[62204]: DEBUG nova.compute.manager [req-da2ff1df-22e1-425a-b068-26eee5184f40 req-d94c5317-cdfa-4468-842a-e63ef231b0a6 service nova] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Refreshing instance network info cache due to event network-changed-071a4dbb-37dd-44af-a177-5dd45b46109a. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 676.865105] env[62204]: DEBUG oslo_concurrency.lockutils [req-da2ff1df-22e1-425a-b068-26eee5184f40 req-d94c5317-cdfa-4468-842a-e63ef231b0a6 service nova] Acquiring lock "refresh_cache-7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.865247] env[62204]: DEBUG oslo_concurrency.lockutils [req-da2ff1df-22e1-425a-b068-26eee5184f40 req-d94c5317-cdfa-4468-842a-e63ef231b0a6 service nova] Acquired lock "refresh_cache-7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.865401] env[62204]: DEBUG nova.network.neutron [req-da2ff1df-22e1-425a-b068-26eee5184f40 req-d94c5317-cdfa-4468-842a-e63ef231b0a6 service nova] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Refreshing network info cache for port 071a4dbb-37dd-44af-a177-5dd45b46109a {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 676.917148] env[62204]: ERROR nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 071a4dbb-37dd-44af-a177-5dd45b46109a, please check neutron logs for more information. [ 676.917148] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 676.917148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 676.917148] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 676.917148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 676.917148] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 676.917148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 676.917148] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 676.917148] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.917148] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 676.917148] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.917148] env[62204]: ERROR nova.compute.manager raise self.value [ 676.917148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 676.917148] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 676.917148] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.917148] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 676.917575] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.917575] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 676.917575] env[62204]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 071a4dbb-37dd-44af-a177-5dd45b46109a, please check neutron logs for more information. [ 676.917575] env[62204]: ERROR nova.compute.manager [ 676.917575] env[62204]: Traceback (most recent call last): [ 676.917575] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 676.917575] env[62204]: listener.cb(fileno) [ 676.917575] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 676.917575] env[62204]: result = function(*args, **kwargs) [ 676.917575] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 676.917575] env[62204]: return func(*args, **kwargs) [ 676.917575] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 676.917575] env[62204]: raise e [ 676.917575] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 676.917575] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 676.917575] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 676.917575] env[62204]: created_port_ids = self._update_ports_for_instance( [ 676.917575] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 676.917575] env[62204]: with excutils.save_and_reraise_exception(): [ 676.917575] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.917575] env[62204]: self.force_reraise() [ 676.917575] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.917575] env[62204]: raise self.value [ 676.917575] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 676.917575] env[62204]: updated_port = self._update_port( [ 676.917575] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.917575] env[62204]: _ensure_no_port_binding_failure(port) [ 676.917575] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.917575] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 676.918303] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 071a4dbb-37dd-44af-a177-5dd45b46109a, please check neutron logs for more information. [ 676.918303] env[62204]: Removing descriptor: 14 [ 676.918303] env[62204]: ERROR nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 071a4dbb-37dd-44af-a177-5dd45b46109a, please check neutron logs for more information. 
[ 676.918303] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Traceback (most recent call last): [ 676.918303] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 676.918303] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] yield resources [ 676.918303] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 676.918303] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self.driver.spawn(context, instance, image_meta, [ 676.918303] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 676.918303] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self._vmops.spawn(context, instance, image_meta, injected_files, [ 676.918303] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 676.918303] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] vm_ref = self.build_virtual_machine(instance, [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] vif_infos = vmwarevif.get_vif_info(self._session, [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] for vif in network_info: [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] return self._sync_wrapper(fn, *args, **kwargs) [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self.wait() [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self[:] = self._gt.wait() [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] return self._exit_event.wait() [ 676.918673] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 676.919102] env[62204]: ERROR 
nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] result = hub.switch() [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] return self.greenlet.switch() [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] result = function(*args, **kwargs) [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] return func(*args, **kwargs) [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] raise e [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] nwinfo = self.network_api.allocate_for_instance( [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 676.919102] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] created_port_ids = self._update_ports_for_instance( [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] with excutils.save_and_reraise_exception(): [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self.force_reraise() [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] raise self.value [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] updated_port = self._update_port( [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.919440] 
env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] _ensure_no_port_binding_failure(port) [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.919440] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] raise exception.PortBindingFailed(port_id=port['id']) [ 676.919749] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] nova.exception.PortBindingFailed: Binding failed for port 071a4dbb-37dd-44af-a177-5dd45b46109a, please check neutron logs for more information. [ 676.919749] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] [ 676.919749] env[62204]: INFO nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Terminating instance [ 676.920583] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Acquiring lock "refresh_cache-7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 677.083748] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.470s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 677.084295] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 677.087523] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 31.696s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.334136] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "cce823b9-6a03-4902-9794-2b93f99eef94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.334397] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "cce823b9-6a03-4902-9794-2b93f99eef94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 677.389331] env[62204]: DEBUG nova.network.neutron [req-da2ff1df-22e1-425a-b068-26eee5184f40 req-d94c5317-cdfa-4468-842a-e63ef231b0a6 service nova] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 677.472309] env[62204]: DEBUG nova.network.neutron [req-da2ff1df-22e1-425a-b068-26eee5184f40 req-d94c5317-cdfa-4468-842a-e63ef231b0a6 service nova] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.588743] env[62204]: DEBUG nova.compute.utils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 677.590169] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 677.590305] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 677.630265] env[62204]: DEBUG nova.policy [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88b00625bb8b421291a126476960786b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf4b73a93786409198fd3b4ecb4411e8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 677.975324] env[62204]: DEBUG oslo_concurrency.lockutils [req-da2ff1df-22e1-425a-b068-26eee5184f40 req-d94c5317-cdfa-4468-842a-e63ef231b0a6 service nova] Releasing lock "refresh_cache-7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.975324] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Acquired lock "refresh_cache-7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.975518] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 677.991718] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c727ed8b-1144-4ea3-b948-3c75a5968993 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.999597] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15261fa-1b27-4696-a4df-16284686adfa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.030823] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Successfully created port: a6a5b112-dcdd-4022-98e5-972f84a5fd31 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 678.033173] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86d940c-e2ec-4f1f-8a7a-f53686bf1525 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.040565] env[62204]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0192bf95-e1b7-4f28-b2d8-63741e043246 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.054863] env[62204]: DEBUG nova.compute.provider_tree [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.095621] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 678.498038] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 678.557464] env[62204]: DEBUG nova.scheduler.client.report [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 678.578035] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.937353] env[62204]: DEBUG nova.compute.manager [req-5a39cdd9-65ce-445e-a566-60b4f2efe23b req-618577ab-e876-4d1e-bd3e-126f90277808 service nova] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Received event network-vif-deleted-071a4dbb-37dd-44af-a177-5dd45b46109a {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 679.063783] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.976s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.064474] env[62204]: ERROR nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Failed to build and run 
instance: nova.exception.PortBindingFailed: Binding failed for port 51133827-fa1c-4881-8008-e3e8a2aef9e6, please check neutron logs for more information. [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Traceback (most recent call last): [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self.driver.spawn(context, instance, image_meta, [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] vm_ref = self.build_virtual_machine(instance, [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] vif_infos = vmwarevif.get_vif_info(self._session, [ 679.064474] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] for vif in network_info: [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] return self._sync_wrapper(fn, *args, **kwargs) [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self.wait() [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self[:] = self._gt.wait() [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] return self._exit_event.wait() [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] result = hub.switch() [ 679.064854] env[62204]: ERROR nova.compute.manager 
[instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 679.064854] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] return self.greenlet.switch() [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] result = function(*args, **kwargs) [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] return func(*args, **kwargs) [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] raise e [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] nwinfo = self.network_api.allocate_for_instance( [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] created_port_ids = self._update_ports_for_instance( [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] with excutils.save_and_reraise_exception(): [ 679.065200] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] self.force_reraise() [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] raise self.value [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] updated_port = self._update_port( [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] _ensure_no_port_binding_failure(port) [ 679.065516] 
env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] raise exception.PortBindingFailed(port_id=port['id']) [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] nova.exception.PortBindingFailed: Binding failed for port 51133827-fa1c-4881-8008-e3e8a2aef9e6, please check neutron logs for more information. [ 679.065516] env[62204]: ERROR nova.compute.manager [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] [ 679.065795] env[62204]: DEBUG nova.compute.utils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Binding failed for port 51133827-fa1c-4881-8008-e3e8a2aef9e6, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 679.066584] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.580s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.070201] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Build of instance 5e264b99-8025-471a-bc6b-238f1bca054c was re-scheduled: Binding failed for port 51133827-fa1c-4881-8008-e3e8a2aef9e6, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 679.070629] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 679.070849] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "refresh_cache-5e264b99-8025-471a-bc6b-238f1bca054c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.070988] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "refresh_cache-5e264b99-8025-471a-bc6b-238f1bca054c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.071156] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 679.082351] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Releasing lock "refresh_cache-7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 679.082806] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 679.083439] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.083580] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-383d5c71-d7bd-4c1c-815f-c83159d5efc2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.095164] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ce70f8-f64b-4aff-929e-48156905ec1b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.106253] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 679.122020] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463 could not be found. [ 679.122324] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.122503] env[62204]: INFO nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Took 0.04 seconds to destroy the instance on the hypervisor. [ 679.122780] env[62204]: DEBUG oslo.service.loopingcall [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 679.124651] env[62204]: DEBUG nova.compute.manager [-] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 679.124749] env[62204]: DEBUG nova.network.neutron [-] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 679.132656] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 679.132948] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 679.133047] env[62204]: DEBUG nova.virt.hardware [None 
req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 679.133225] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 679.133367] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 679.133563] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 679.133782] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 679.133852] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 679.133996] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 679.136041] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 679.136041] env[62204]: DEBUG nova.virt.hardware [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 679.136041] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c38d5ba0-f318-47ec-8139-67f117262126 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.143128] env[62204]: DEBUG nova.network.neutron [-] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 679.145175] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75891082-1075-41db-90d3-2ba49b2b7e4b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.489771] env[62204]: ERROR nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a6a5b112-dcdd-4022-98e5-972f84a5fd31, please check neutron logs for more information. [ 679.489771] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 679.489771] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.489771] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 679.489771] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.489771] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 679.489771] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.489771] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 679.489771] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.489771] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 679.489771] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.489771] env[62204]: ERROR nova.compute.manager raise self.value [ 679.489771] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.489771] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 679.489771] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.489771] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 679.490224] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.490224] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 679.490224] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a6a5b112-dcdd-4022-98e5-972f84a5fd31, please check neutron logs for more information. 
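Editor's note: the traceback above ends in nova/network/neutron.py's _ensure_no_port_binding_failure raising PortBindingFailed. A minimal, self-contained sketch of that kind of check (not Nova's actual code, names reused only for readability): Neutron marks a port it could not bind by setting binding:vif_type to the sentinel value 'binding_failed', and the compute side turns that into an exception.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def ensure_no_port_binding_failure(port):
    # Assumption: Neutron reports an unbindable port by setting
    # binding:vif_type to the sentinel 'binding_failed', as for the
    # ports in this log.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


try:
    ensure_no_port_binding_failure(
        {'id': 'a6a5b112-dcdd-4022-98e5-972f84a5fd31',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)

In this section both port 51133827-fa1c-4881-8008-e3e8a2aef9e6 and port a6a5b112-dcdd-4022-98e5-972f84a5fd31 come back in that state, which is consistent with the later "Instance does not exist on backend" warnings: the builds fail before any VM is created on the vCenter side.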
[ 679.490224] env[62204]: ERROR nova.compute.manager [ 679.490224] env[62204]: Traceback (most recent call last): [ 679.490224] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 679.490224] env[62204]: listener.cb(fileno) [ 679.490224] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 679.490224] env[62204]: result = function(*args, **kwargs) [ 679.490224] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 679.490224] env[62204]: return func(*args, **kwargs) [ 679.490224] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 679.490224] env[62204]: raise e [ 679.490224] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.490224] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 679.490224] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.490224] env[62204]: created_port_ids = self._update_ports_for_instance( [ 679.490224] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.490224] env[62204]: with excutils.save_and_reraise_exception(): [ 679.490224] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.490224] env[62204]: self.force_reraise() [ 679.490224] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.490224] env[62204]: raise self.value [ 679.490224] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.490224] env[62204]: updated_port = self._update_port( [ 679.490224] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.490224] env[62204]: _ensure_no_port_binding_failure(port) [ 679.490224] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.490224] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 679.491268] env[62204]: nova.exception.PortBindingFailed: Binding failed for port a6a5b112-dcdd-4022-98e5-972f84a5fd31, please check neutron logs for more information. [ 679.491268] env[62204]: Removing descriptor: 14 [ 679.491268] env[62204]: ERROR nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a6a5b112-dcdd-4022-98e5-972f84a5fd31, please check neutron logs for more information. 
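Editor's note: both tracebacks route the failure through oslo_utils.excutils.save_and_reraise_exception (the force_reraise / "raise self.value" frames). A small usage sketch of that context manager, assuming only the public oslo_utils API; cleanup() and the RuntimeError are hypothetical stand-ins, not Nova code.

from oslo_utils import excutils


def cleanup():
    # Hypothetical rollback step; in the flow above this is where partially
    # created ports would be cleaned up before the error propagates.
    print("rolling back partially created ports")


def update_ports():
    try:
        raise RuntimeError("port update failed")  # stand-in for the Neutron call
    except Exception:
        with excutils.save_and_reraise_exception():
            # Runs while the original exception is held; when the block exits,
            # the saved exception is re-raised (the "raise self.value" frame
            # seen in the tracebacks above).
            cleanup()


try:
    update_ports()
except RuntimeError as exc:
    print("re-raised:", exc)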
[ 679.491268] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Traceback (most recent call last): [ 679.491268] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 679.491268] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] yield resources [ 679.491268] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 679.491268] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self.driver.spawn(context, instance, image_meta, [ 679.491268] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 679.491268] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.491268] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 679.491268] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] vm_ref = self.build_virtual_machine(instance, [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] vif_infos = vmwarevif.get_vif_info(self._session, [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] for vif in network_info: [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] return self._sync_wrapper(fn, *args, **kwargs) [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self.wait() [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self[:] = self._gt.wait() [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] return self._exit_event.wait() [ 679.491623] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 679.492032] env[62204]: ERROR 
nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] result = hub.switch() [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] return self.greenlet.switch() [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] result = function(*args, **kwargs) [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] return func(*args, **kwargs) [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] raise e [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] nwinfo = self.network_api.allocate_for_instance( [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.492032] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] created_port_ids = self._update_ports_for_instance( [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] with excutils.save_and_reraise_exception(): [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self.force_reraise() [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] raise self.value [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] updated_port = self._update_port( [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.492493] 
env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] _ensure_no_port_binding_failure(port) [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.492493] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] raise exception.PortBindingFailed(port_id=port['id']) [ 679.492958] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] nova.exception.PortBindingFailed: Binding failed for port a6a5b112-dcdd-4022-98e5-972f84a5fd31, please check neutron logs for more information. [ 679.492958] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] [ 679.492958] env[62204]: INFO nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Terminating instance [ 679.494592] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Acquiring lock "refresh_cache-7cccaaf2-f17d-426d-9340-e33260235706" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.494592] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Acquired lock "refresh_cache-7cccaaf2-f17d-426d-9340-e33260235706" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.494592] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 679.600985] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 679.650490] env[62204]: DEBUG nova.network.neutron [-] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.687907] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.942741] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b0d9d2-8a97-43c0-b917-e9daf477ff80 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.951557] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91208b1f-0547-4760-b29f-364a0831ce56 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.982406] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ee4717-60db-40d8-8837-cb25e3c543b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.993112] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e319093-0bc7-401a-a458-91f7b2ecfbd2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.009900] env[62204]: DEBUG nova.compute.provider_tree [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.016974] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.136758] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.152355] env[62204]: INFO nova.compute.manager [-] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Took 1.03 seconds to deallocate network for instance. 
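Editor's note: the "Acquiring/Acquired/Releasing lock refresh_cache-<instance uuid>" records that bracket each cache rebuild above come from oslo_concurrency.lockutils. A minimal sketch, using only the public lockutils.lock() context manager, of serializing per-instance cache refreshes in the same style; refresh_network_cache() is a hypothetical stand-in.

from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid):
    # Hypothetical stand-in for rebuilding the instance's network info cache.
    print("refreshing network info cache for", instance_uuid)


def refresh_cache(instance_uuid):
    # One named lock per instance: concurrent refreshes of the same instance
    # are serialized, while refreshes of different instances run in parallel.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh_network_cache(instance_uuid)


refresh_cache('7cccaaf2-f17d-426d-9340-e33260235706')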
[ 680.154596] env[62204]: DEBUG nova.compute.claims [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 680.155228] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.189746] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "refresh_cache-5e264b99-8025-471a-bc6b-238f1bca054c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.189976] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 680.190198] env[62204]: DEBUG nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 680.190369] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 680.204890] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.515026] env[62204]: DEBUG nova.scheduler.client.report [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 680.639552] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Releasing lock "refresh_cache-7cccaaf2-f17d-426d-9340-e33260235706" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.640011] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 680.640219] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 680.640523] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f38519ee-48e4-4a06-8aca-e4df004ae23a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.650024] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c12da9-be2b-48c5-8eeb-873ba3559e2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.672260] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7cccaaf2-f17d-426d-9340-e33260235706 could not be found. [ 680.672537] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 680.672757] env[62204]: INFO nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Took 0.03 seconds to destroy the instance on the hypervisor. 
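Editor's note: the inventory payload reported for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 above carries total, reserved, allocation_ratio and max_unit per resource class. A short illustration (not Placement's own code) of how the effective schedulable capacity follows from those numbers, assuming the usual capacity = (total - reserved) * allocation_ratio rule, with max_unit capping any single allocation.

# Inventory values copied from the log records above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'max_unit': 65530},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0,
                'max_unit': 156},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, max per allocation={inv['max_unit']}")

With the values in the log this gives 192 VCPU, 196078 MB of memory and 400 GB of disk of schedulable capacity on this node.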
[ 680.673052] env[62204]: DEBUG oslo.service.loopingcall [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.673336] env[62204]: DEBUG nova.compute.manager [-] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 680.673459] env[62204]: DEBUG nova.network.neutron [-] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 680.689027] env[62204]: DEBUG nova.network.neutron [-] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.706957] env[62204]: DEBUG nova.network.neutron [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.988592] env[62204]: DEBUG nova.compute.manager [req-85fa21a3-c3e5-40f5-9faa-a5cdb652f9d1 req-a8dbb6b0-2d3a-42c1-b6b9-83e6e1da5466 service nova] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Received event network-changed-a6a5b112-dcdd-4022-98e5-972f84a5fd31 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 680.988791] env[62204]: DEBUG nova.compute.manager [req-85fa21a3-c3e5-40f5-9faa-a5cdb652f9d1 req-a8dbb6b0-2d3a-42c1-b6b9-83e6e1da5466 service nova] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Refreshing instance network info cache due to event network-changed-a6a5b112-dcdd-4022-98e5-972f84a5fd31. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 680.989010] env[62204]: DEBUG oslo_concurrency.lockutils [req-85fa21a3-c3e5-40f5-9faa-a5cdb652f9d1 req-a8dbb6b0-2d3a-42c1-b6b9-83e6e1da5466 service nova] Acquiring lock "refresh_cache-7cccaaf2-f17d-426d-9340-e33260235706" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.989156] env[62204]: DEBUG oslo_concurrency.lockutils [req-85fa21a3-c3e5-40f5-9faa-a5cdb652f9d1 req-a8dbb6b0-2d3a-42c1-b6b9-83e6e1da5466 service nova] Acquired lock "refresh_cache-7cccaaf2-f17d-426d-9340-e33260235706" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.989312] env[62204]: DEBUG nova.network.neutron [req-85fa21a3-c3e5-40f5-9faa-a5cdb652f9d1 req-a8dbb6b0-2d3a-42c1-b6b9-83e6e1da5466 service nova] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Refreshing network info cache for port a6a5b112-dcdd-4022-98e5-972f84a5fd31 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 681.020029] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.953s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.020637] env[62204]: ERROR nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6, please check neutron logs for more information. 
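Editor's note: several lockutils records in this section show long waits on the "compute_resources" lock (31.696s and 30.580s above) against hold times of roughly two seconds, i.e. a queue of claim/abort operations. A small, self-contained sketch that pulls those figures out of log text like this to spot contention; the regex relies only on the 'Lock "..." acquired by "..." :: waited N.NNNs' wording visible here.

import re

LOCK_RE = re.compile(
    r'Lock "(?P<name>[^"]+)" acquired by "(?P<caller>[^"]+)" :: '
    r'waited (?P<waited>\d+\.\d+)s')


def contended_locks(log_text, threshold=1.0):
    # Yield (lock name, caller, wait in seconds) for every acquisition that
    # waited at least `threshold` seconds.
    for m in LOCK_RE.finditer(log_text):
        waited = float(m.group('waited'))
        if waited >= threshold:
            yield m.group('name'), m.group('caller'), waited


sample = ('Lock "compute_resources" acquired by '
          '"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim"'
          ' :: waited 31.696s')
for name, caller, waited in contended_locks(sample):
    print(f'{name}: waited {waited:.3f}s in {caller}')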
[ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] Traceback (most recent call last): [ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self.driver.spawn(context, instance, image_meta, [ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] vm_ref = self.build_virtual_machine(instance, [ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.020637] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] for vif in network_info: [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] return self._sync_wrapper(fn, *args, **kwargs) [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self.wait() [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self[:] = self._gt.wait() [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] return self._exit_event.wait() [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] result = hub.switch() [ 681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
681.020954] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] return self.greenlet.switch() [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] result = function(*args, **kwargs) [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] return func(*args, **kwargs) [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] raise e [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] nwinfo = self.network_api.allocate_for_instance( [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] created_port_ids = self._update_ports_for_instance( [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] with excutils.save_and_reraise_exception(): [ 681.021348] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] self.force_reraise() [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] raise self.value [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] updated_port = self._update_port( [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] _ensure_no_port_binding_failure(port) [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] raise exception.PortBindingFailed(port_id=port['id']) [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] nova.exception.PortBindingFailed: Binding failed for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6, please check neutron logs for more information. [ 681.021676] env[62204]: ERROR nova.compute.manager [instance: 9e573093-6434-452d-8025-4688d9f78c53] [ 681.021955] env[62204]: DEBUG nova.compute.utils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Binding failed for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 681.022443] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.733s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.023886] env[62204]: INFO nova.compute.claims [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 681.026561] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Build of instance 9e573093-6434-452d-8025-4688d9f78c53 was re-scheduled: Binding failed for port 20f60ef7-c150-466f-a93f-3f1727cfb3a6, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 681.027008] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 681.027233] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Acquiring lock "refresh_cache-9e573093-6434-452d-8025-4688d9f78c53" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.027378] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Acquired lock "refresh_cache-9e573093-6434-452d-8025-4688d9f78c53" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.027534] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 681.191146] env[62204]: DEBUG nova.network.neutron [-] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.209436] env[62204]: INFO nova.compute.manager [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 5e264b99-8025-471a-bc6b-238f1bca054c] Took 1.02 seconds to deallocate network for instance. [ 681.504875] env[62204]: DEBUG nova.network.neutron [req-85fa21a3-c3e5-40f5-9faa-a5cdb652f9d1 req-a8dbb6b0-2d3a-42c1-b6b9-83e6e1da5466 service nova] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 681.546712] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 681.621178] env[62204]: DEBUG nova.network.neutron [req-85fa21a3-c3e5-40f5-9faa-a5cdb652f9d1 req-a8dbb6b0-2d3a-42c1-b6b9-83e6e1da5466 service nova] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.626164] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.694420] env[62204]: INFO nova.compute.manager [-] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Took 1.02 seconds to deallocate network for instance. [ 681.697100] env[62204]: DEBUG nova.compute.claims [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 681.697293] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.122039] env[62204]: DEBUG oslo_concurrency.lockutils [req-85fa21a3-c3e5-40f5-9faa-a5cdb652f9d1 req-a8dbb6b0-2d3a-42c1-b6b9-83e6e1da5466 service nova] Releasing lock "refresh_cache-7cccaaf2-f17d-426d-9340-e33260235706" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.122312] env[62204]: DEBUG nova.compute.manager [req-85fa21a3-c3e5-40f5-9faa-a5cdb652f9d1 req-a8dbb6b0-2d3a-42c1-b6b9-83e6e1da5466 service nova] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Received event network-vif-deleted-a6a5b112-dcdd-4022-98e5-972f84a5fd31 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 682.128474] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Releasing lock "refresh_cache-9e573093-6434-452d-8025-4688d9f78c53" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.128682] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 682.128864] env[62204]: DEBUG nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 682.129019] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 682.145024] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.240439] env[62204]: INFO nova.scheduler.client.report [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleted allocations for instance 5e264b99-8025-471a-bc6b-238f1bca054c [ 682.398566] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80618e8-f3c4-4f67-8a89-677c6bef810d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.407022] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735dac51-1ba1-4573-9023-fd41b46f4611 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.434748] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21be2519-c3bf-47cb-b4c8-8513c01416bc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.441820] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e2843b-5946-49fc-8b70-94b1c70ba3b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.455576] env[62204]: DEBUG nova.compute.provider_tree [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 682.648372] env[62204]: DEBUG nova.network.neutron [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.749295] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b3648bba-db15-4991-980f-632d190aff9a tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "5e264b99-8025-471a-bc6b-238f1bca054c" 
"released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.377s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.959113] env[62204]: DEBUG nova.scheduler.client.report [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 683.153037] env[62204]: INFO nova.compute.manager [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] [instance: 9e573093-6434-452d-8025-4688d9f78c53] Took 1.02 seconds to deallocate network for instance. [ 683.254415] env[62204]: DEBUG nova.compute.manager [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 683.464021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.464463] env[62204]: DEBUG nova.compute.manager [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 683.467188] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 30.049s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.774268] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.972366] env[62204]: DEBUG nova.compute.utils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 683.976712] env[62204]: DEBUG nova.compute.manager [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Not allocating networking since 'none' was specified. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 684.180313] env[62204]: INFO nova.scheduler.client.report [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Deleted allocations for instance 9e573093-6434-452d-8025-4688d9f78c53 [ 684.324262] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58874c60-7d70-4e79-86a6-352ea51150ba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.332041] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebaf29cb-2009-4ff4-bc49-4488b938d181 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.366123] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c9d871-d43b-4c59-84d1-07535d0cc589 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.373183] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b53fad-a520-4130-9f02-f486ea371f84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.385918] env[62204]: DEBUG nova.compute.provider_tree [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 684.390692] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] 
Acquiring lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.390920] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 684.477809] env[62204]: DEBUG nova.compute.manager [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 684.689478] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e7f74bdf-3d41-42cf-a99a-e802961999cb tempest-ServersTestFqdnHostnames-1262269175 tempest-ServersTestFqdnHostnames-1262269175-project-member] Lock "9e573093-6434-452d-8025-4688d9f78c53" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.980s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 684.892145] env[62204]: DEBUG nova.scheduler.client.report [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 685.195018] env[62204]: DEBUG nova.compute.manager [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 685.397667] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.930s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.398344] env[62204]: ERROR nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f39354bc-f706-4aa8-859a-b97cae303bdd, please check neutron logs for more information. [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Traceback (most recent call last): [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self.driver.spawn(context, instance, image_meta, [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] vm_ref = self.build_virtual_machine(instance, [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] vif_infos = vmwarevif.get_vif_info(self._session, [ 685.398344] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] for vif in network_info: [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] return self._sync_wrapper(fn, *args, **kwargs) [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self.wait() [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 685.398703] env[62204]: ERROR 
nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self[:] = self._gt.wait() [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] return self._exit_event.wait() [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] result = hub.switch() [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 685.398703] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] return self.greenlet.switch() [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] result = function(*args, **kwargs) [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] return func(*args, **kwargs) [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] raise e [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] nwinfo = self.network_api.allocate_for_instance( [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] created_port_ids = self._update_ports_for_instance( [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] with excutils.save_and_reraise_exception(): [ 685.399065] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] self.force_reraise() [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] raise self.value [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] updated_port = self._update_port( [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] _ensure_no_port_binding_failure(port) [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] raise exception.PortBindingFailed(port_id=port['id']) [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] nova.exception.PortBindingFailed: Binding failed for port f39354bc-f706-4aa8-859a-b97cae303bdd, please check neutron logs for more information. [ 685.399455] env[62204]: ERROR nova.compute.manager [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] [ 685.399794] env[62204]: DEBUG nova.compute.utils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Binding failed for port f39354bc-f706-4aa8-859a-b97cae303bdd, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 685.400497] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.027s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.401798] env[62204]: INFO nova.compute.claims [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.404239] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Build of instance 63ed8992-0e8f-41ca-8b28-c0b2538ff61c was re-scheduled: Binding failed for port f39354bc-f706-4aa8-859a-b97cae303bdd, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 685.404688] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 685.404915] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-63ed8992-0e8f-41ca-8b28-c0b2538ff61c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.405075] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-63ed8992-0e8f-41ca-8b28-c0b2538ff61c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.405235] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 685.489015] env[62204]: DEBUG nova.compute.manager [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 685.519759] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 685.519882] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 685.519918] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 685.520103] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 685.520253] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 685.520397] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 685.520599] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 685.520752] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 685.520914] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e 
tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 685.521097] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 685.521278] env[62204]: DEBUG nova.virt.hardware [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 685.522134] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051a1138-47e4-4d9a-a3a1-dd58ea4f610f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.529948] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddd840e-bd6f-4bc1-8cd0-9dc75af9ba9c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.545609] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.551825] env[62204]: DEBUG oslo.service.loopingcall [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.552076] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.552291] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-463284a5-67f0-4b97-99cb-86ffecc00134 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.569940] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.569940] env[62204]: value = "task-1199428" [ 685.569940] env[62204]: _type = "Task" [ 685.569940] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.577724] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199428, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.715971] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.925493] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 686.038833] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.079686] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199428, 'name': CreateVM_Task, 'duration_secs': 0.243595} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.079866] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 686.080414] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.080578] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.080914] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 686.081183] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa9cc874-d2b0-4a62-863c-5f912e21c4ec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.085843] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 686.085843] env[62204]: value = 
"session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fff52d-b326-0b56-a4fb-62b05306dc8f" [ 686.085843] env[62204]: _type = "Task" [ 686.085843] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.093435] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fff52d-b326-0b56-a4fb-62b05306dc8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.544413] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-63ed8992-0e8f-41ca-8b28-c0b2538ff61c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.544413] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 686.544413] env[62204]: DEBUG nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 686.544413] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 686.568091] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 686.596881] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fff52d-b326-0b56-a4fb-62b05306dc8f, 'name': SearchDatastore_Task, 'duration_secs': 0.009791} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.599395] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.599630] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 686.599853] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.599995] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.600365] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 686.602133] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd5fc369-eaca-4d73-874e-a9ba8e002066 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.610879] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 686.611367] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 686.611783] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aae4806-3359-4f79-a8c1-f66c5cabfab4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.622491] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 686.622491] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5210ec41-a6e2-91ca-0e10-2ae094de835b" [ 686.622491] env[62204]: _type = "Task" [ 686.622491] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.632150] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5210ec41-a6e2-91ca-0e10-2ae094de835b, 'name': SearchDatastore_Task, 'duration_secs': 0.008448} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.635291] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e880c7a2-cabf-4c13-a36f-6374fbcf96d5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.639878] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 686.639878] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5205d793-8fd1-2db8-ed16-794fc0b6006f" [ 686.639878] env[62204]: _type = "Task" [ 686.639878] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.649261] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5205d793-8fd1-2db8-ed16-794fc0b6006f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.882562] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4951df46-ff4f-4cae-9ba9-27d4455c5459 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.890655] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06f71b3-cc4f-4420-b79e-122ee9df7330 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.922715] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3624957-373d-4d4f-ac8d-455de1e63d34 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.930336] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a5e242-684b-4eff-adab-1a4677bbc072 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.944454] env[62204]: DEBUG nova.compute.provider_tree [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.071065] env[62204]: DEBUG nova.network.neutron [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.153208] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5205d793-8fd1-2db8-ed16-794fc0b6006f, 'name': SearchDatastore_Task, 'duration_secs': 0.007696} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.153208] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.153208] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] ce74983e-8347-425c-967a-6a78a7daa701/ce74983e-8347-425c-967a-6a78a7daa701.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 687.153208] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69813ace-abaf-498d-868f-061848710c50 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.158618] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 687.158618] env[62204]: value = "task-1199429" [ 687.158618] env[62204]: _type = "Task" [ 687.158618] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.166666] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199429, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.450372] env[62204]: DEBUG nova.scheduler.client.report [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 687.574607] env[62204]: INFO nova.compute.manager [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 63ed8992-0e8f-41ca-8b28-c0b2538ff61c] Took 1.03 seconds to deallocate network for instance. [ 687.668074] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199429, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46116} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.668352] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] ce74983e-8347-425c-967a-6a78a7daa701/ce74983e-8347-425c-967a-6a78a7daa701.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 687.668575] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 687.668841] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4dcfb48-e11c-4c7f-9e78-2df92e293468 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.677088] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 687.677088] env[62204]: value = "task-1199430" [ 687.677088] env[62204]: _type = "Task" [ 687.677088] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.689332] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199430, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.960725] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.961197] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 687.963932] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.920s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.965427] env[62204]: INFO nova.compute.claims [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.192326] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080997} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.192684] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 688.193587] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d34660-0278-4efc-85b1-badc5bb07a65 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.215085] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] ce74983e-8347-425c-967a-6a78a7daa701/ce74983e-8347-425c-967a-6a78a7daa701.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 688.215428] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f20b46c-d578-4470-810e-f67482a76a59 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.235849] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 688.235849] env[62204]: value = "task-1199431" [ 688.235849] env[62204]: _type = "Task" [ 688.235849] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.244359] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199431, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.469560] env[62204]: DEBUG nova.compute.utils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 688.474089] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 688.474089] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 688.544846] env[62204]: DEBUG nova.policy [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b8d9d50bbf74460baf8e759376aa628', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '59f8e4f8496d42b18bfe9a99a5b1a91d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 688.611595] env[62204]: INFO nova.scheduler.client.report [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted allocations for instance 63ed8992-0e8f-41ca-8b28-c0b2538ff61c [ 688.750013] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199431, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.980487] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 689.120028] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Successfully created port: 691de40c-8d46-4034-93f8-719356568cc2 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.122690] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f51e28b-17b2-45a1-9d42-bbcd7a1f1c35 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "63ed8992-0e8f-41ca-8b28-c0b2538ff61c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.367s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.251734] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199431, 'name': ReconfigVM_Task, 'duration_secs': 0.82384} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.252053] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Reconfigured VM instance instance-00000024 to attach disk [datastore1] ce74983e-8347-425c-967a-6a78a7daa701/ce74983e-8347-425c-967a-6a78a7daa701.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 689.252969] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58abef2c-ca8c-44bc-a01e-ac402741953d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.263420] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 689.263420] env[62204]: value = "task-1199432" [ 689.263420] env[62204]: _type = "Task" [ 689.263420] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.272739] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199432, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.493589] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1d8869-a044-491c-8f5e-1d25e52301b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.503262] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc01ddc2-c743-473e-b490-39f691494dcb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.537336] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c511be-630b-4165-9dfa-c0d299210274 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.545450] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00cb2ed7-e21c-41bc-ad1b-a747e79ba803 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.561700] env[62204]: DEBUG nova.compute.provider_tree [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.627908] env[62204]: DEBUG nova.compute.manager [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 689.775288] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199432, 'name': Rename_Task, 'duration_secs': 0.168497} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.775573] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 689.775971] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2711869e-d5d1-441b-b024-35a68ea34e58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.782634] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 689.782634] env[62204]: value = "task-1199433" [ 689.782634] env[62204]: _type = "Task" [ 689.782634] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.790888] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199433, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.998482] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 690.031039] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 690.031288] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 690.031440] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.031614] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 690.031754] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 690.031890] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 690.033223] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be 
tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 690.033398] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 690.033568] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 690.034871] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 690.034871] env[62204]: DEBUG nova.virt.hardware [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 690.034938] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20eb873b-dc76-4881-b9b3-e687d952523c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.043578] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180fc536-193f-43e4-9e83-385155a435b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.066388] env[62204]: DEBUG nova.scheduler.client.report [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 690.158706] env[62204]: ERROR nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 691de40c-8d46-4034-93f8-719356568cc2, please check neutron logs for more information. 
[ 690.158706] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 690.158706] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 690.158706] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 690.158706] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 690.158706] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 690.158706] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 690.158706] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 690.158706] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.158706] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 690.158706] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.158706] env[62204]: ERROR nova.compute.manager raise self.value [ 690.158706] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 690.158706] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 690.158706] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.158706] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 690.159318] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.159318] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 690.159318] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 691de40c-8d46-4034-93f8-719356568cc2, please check neutron logs for more information. 
[ 690.159318] env[62204]: ERROR nova.compute.manager [ 690.159318] env[62204]: Traceback (most recent call last): [ 690.159318] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 690.159318] env[62204]: listener.cb(fileno) [ 690.159318] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.159318] env[62204]: result = function(*args, **kwargs) [ 690.159318] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 690.159318] env[62204]: return func(*args, **kwargs) [ 690.159318] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 690.159318] env[62204]: raise e [ 690.159318] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 690.159318] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 690.159318] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 690.159318] env[62204]: created_port_ids = self._update_ports_for_instance( [ 690.159318] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 690.159318] env[62204]: with excutils.save_and_reraise_exception(): [ 690.159318] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.159318] env[62204]: self.force_reraise() [ 690.159318] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.159318] env[62204]: raise self.value [ 690.159318] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 690.159318] env[62204]: updated_port = self._update_port( [ 690.159318] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.159318] env[62204]: _ensure_no_port_binding_failure(port) [ 690.159318] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.159318] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 690.160231] env[62204]: nova.exception.PortBindingFailed: Binding failed for port 691de40c-8d46-4034-93f8-719356568cc2, please check neutron logs for more information. [ 690.160231] env[62204]: Removing descriptor: 16 [ 690.160231] env[62204]: ERROR nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 691de40c-8d46-4034-93f8-719356568cc2, please check neutron logs for more information. 
[ 690.160231] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Traceback (most recent call last): [ 690.160231] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 690.160231] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] yield resources [ 690.160231] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 690.160231] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self.driver.spawn(context, instance, image_meta, [ 690.160231] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 690.160231] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self._vmops.spawn(context, instance, image_meta, injected_files, [ 690.160231] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 690.160231] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] vm_ref = self.build_virtual_machine(instance, [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] vif_infos = vmwarevif.get_vif_info(self._session, [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] for vif in network_info: [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] return self._sync_wrapper(fn, *args, **kwargs) [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self.wait() [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self[:] = self._gt.wait() [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] return self._exit_event.wait() [ 690.160771] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 690.161249] env[62204]: ERROR 
nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] result = hub.switch() [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] return self.greenlet.switch() [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] result = function(*args, **kwargs) [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] return func(*args, **kwargs) [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] raise e [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] nwinfo = self.network_api.allocate_for_instance( [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 690.161249] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] created_port_ids = self._update_ports_for_instance( [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] with excutils.save_and_reraise_exception(): [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self.force_reraise() [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] raise self.value [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] updated_port = self._update_port( [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 690.161718] 
env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] _ensure_no_port_binding_failure(port) [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 690.161718] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] raise exception.PortBindingFailed(port_id=port['id']) [ 690.162119] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] nova.exception.PortBindingFailed: Binding failed for port 691de40c-8d46-4034-93f8-719356568cc2, please check neutron logs for more information. [ 690.162119] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] [ 690.162119] env[62204]: INFO nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Terminating instance [ 690.162254] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.163202] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Acquiring lock "refresh_cache-0ab619ea-755b-4d71-9c12-0eeda0b42a39" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.163351] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Acquired lock "refresh_cache-0ab619ea-755b-4d71-9c12-0eeda0b42a39" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.163510] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 690.260560] env[62204]: DEBUG nova.compute.manager [req-51736bb3-392b-49aa-97fe-7d33c275df2a req-ffad56c8-1c11-4d7b-850b-ee0dc127ae92 service nova] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Received event network-changed-691de40c-8d46-4034-93f8-719356568cc2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 690.260774] env[62204]: DEBUG nova.compute.manager [req-51736bb3-392b-49aa-97fe-7d33c275df2a req-ffad56c8-1c11-4d7b-850b-ee0dc127ae92 service nova] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Refreshing instance network info cache due to event network-changed-691de40c-8d46-4034-93f8-719356568cc2. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 690.261045] env[62204]: DEBUG oslo_concurrency.lockutils [req-51736bb3-392b-49aa-97fe-7d33c275df2a req-ffad56c8-1c11-4d7b-850b-ee0dc127ae92 service nova] Acquiring lock "refresh_cache-0ab619ea-755b-4d71-9c12-0eeda0b42a39" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 690.291980] env[62204]: DEBUG oslo_vmware.api [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199433, 'name': PowerOnVM_Task, 'duration_secs': 0.453994} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.292344] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 690.292601] env[62204]: INFO nova.compute.manager [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Took 4.80 seconds to spawn the instance on the hypervisor. [ 690.292830] env[62204]: DEBUG nova.compute.manager [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 690.293897] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f955d5-5fd1-4757-b5e5-796edb163eda {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.576888] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.576888] env[62204]: DEBUG nova.compute.manager [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 690.578289] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.178s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.681679] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 690.739787] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "2c393123-87de-460a-965d-43473478a79f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.740072] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2c393123-87de-460a-965d-43473478a79f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.779019] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.814828] env[62204]: INFO nova.compute.manager [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Took 40.55 seconds to build instance. [ 691.080320] env[62204]: DEBUG nova.compute.utils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 691.082388] env[62204]: DEBUG nova.compute.manager [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Not allocating networking since 'none' was specified. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 691.284418] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Releasing lock "refresh_cache-0ab619ea-755b-4d71-9c12-0eeda0b42a39" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 691.284908] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 691.285113] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 691.285418] env[62204]: DEBUG oslo_concurrency.lockutils [req-51736bb3-392b-49aa-97fe-7d33c275df2a req-ffad56c8-1c11-4d7b-850b-ee0dc127ae92 service nova] Acquired lock "refresh_cache-0ab619ea-755b-4d71-9c12-0eeda0b42a39" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.285584] env[62204]: DEBUG nova.network.neutron [req-51736bb3-392b-49aa-97fe-7d33c275df2a req-ffad56c8-1c11-4d7b-850b-ee0dc127ae92 service nova] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Refreshing network info cache for port 691de40c-8d46-4034-93f8-719356568cc2 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 691.286631] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1e3962e-ef8c-4177-a6e4-c90a17c0037d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.300976] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8114a14-d3f6-4e72-b077-88bddb68fed8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.317590] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a41d3222-4a23-4fd2-ab73-dfc2e7bb3e0e tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "ce74983e-8347-425c-967a-6a78a7daa701" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.031s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.328553] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0ab619ea-755b-4d71-9c12-0eeda0b42a39 could not be found. [ 691.328795] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 691.328979] env[62204]: INFO nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Took 0.04 seconds to destroy the instance on the hypervisor. [ 691.331263] env[62204]: DEBUG oslo.service.loopingcall [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 691.333746] env[62204]: DEBUG nova.compute.manager [-] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 691.334539] env[62204]: DEBUG nova.network.neutron [-] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 691.350734] env[62204]: DEBUG nova.network.neutron [-] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 691.518642] env[62204]: INFO nova.compute.manager [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Rebuilding instance [ 691.534953] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c5a6ba-facb-4e6a-87b8-0362d9d67b5d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.543105] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512ee10b-c07a-43a7-8143-94ff2b49a927 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.578679] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3973d98-674d-48cd-9639-adf2295fd4c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.582882] env[62204]: DEBUG nova.compute.manager [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 691.588379] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799c0f46-0666-4384-bc31-eb0c12d71626 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.605230] env[62204]: DEBUG nova.compute.provider_tree [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.609050] env[62204]: DEBUG nova.compute.manager [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 691.609050] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7fabaf4-b749-4b8f-a41f-3febf2302148 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.808105] env[62204]: DEBUG nova.network.neutron [req-51736bb3-392b-49aa-97fe-7d33c275df2a req-ffad56c8-1c11-4d7b-850b-ee0dc127ae92 service nova] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 691.821513] env[62204]: DEBUG nova.compute.manager [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 691.854121] env[62204]: DEBUG nova.network.neutron [-] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.898317] env[62204]: DEBUG nova.network.neutron [req-51736bb3-392b-49aa-97fe-7d33c275df2a req-ffad56c8-1c11-4d7b-850b-ee0dc127ae92 service nova] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.057624] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "67ee5c4d-3825-4580-a26e-74eb8da50883" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.057863] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "67ee5c4d-3825-4580-a26e-74eb8da50883" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.082278] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "1121b1b8-127e-475f-8dfc-de43911de39a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.082278] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1121b1b8-127e-475f-8dfc-de43911de39a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.109360] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.109593] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.111437] env[62204]: DEBUG nova.scheduler.client.report [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 692.118384] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 692.118384] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9a15609-75f6-4f8f-b21e-958c05012980 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.125397] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 692.125397] env[62204]: value = "task-1199434" [ 692.125397] env[62204]: _type = "Task" [ 692.125397] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.141093] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199434, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.283079] env[62204]: DEBUG nova.compute.manager [req-11794c26-4f52-4fc9-a4b5-46001c15aa0b req-e0adc38e-a72e-4a8e-bdce-73ee88c603b3 service nova] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Received event network-vif-deleted-691de40c-8d46-4034-93f8-719356568cc2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 692.357456] env[62204]: INFO nova.compute.manager [-] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Took 1.02 seconds to deallocate network for instance. 
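
Both build failures recorded above (ports 691de40c-8d46-4034-93f8-719356568cc2 and c81f162b-24a7-413f-b08a-8ba2e933db30) end in nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure() in /opt/stack/nova/nova/network/neutron.py (line 294 in the tracebacks), after which the compute manager terminates the instance and deallocates its network, as logged here. The following is only a hedged, standalone sketch of the check those tracebacks imply, not Nova's verbatim source: the PortBindingFailed class and the VIF_TYPE_BINDING_FAILED constant are local stand-ins defined for the example, and the assumption is that Neutron marks an unbindable port with binding:vif_type set to 'binding_failed'.

    # Hedged sketch of the port-binding check implied by the tracebacks above.
    # Assumption: Neutron returns binding:vif_type == 'binding_failed' for a port
    # it could not bind; the helper then raises, which surfaces in the log as
    # "Binding failed for port ..., please check neutron logs for more information."

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # local stand-in for the Neutron sentinel


    class PortBindingFailed(Exception):
        """Local stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information."
            )
            self.port_id = port_id


    def ensure_no_port_binding_failure(port):
        """Raise if the port payload says Neutron failed to bind it."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        # Port payload shaped like the one behind the 691de40c-... failure above.
        failed_port = {
            'id': '691de40c-8d46-4034-93f8-719356568cc2',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED,
        }
        try:
            ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)  # same message format as the ERROR records above

Under that reading, the "Terminating instance", "Instance does not exist on backend", and "Deallocating network for instance" records that follow each failure are the normal cleanup path once the spawn aborts on this exception; the root cause is on the Neutron side, as the message itself advises.
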
[ 692.360748] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.361513] env[62204]: DEBUG nova.compute.claims [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 692.361794] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 692.402552] env[62204]: DEBUG oslo_concurrency.lockutils [req-51736bb3-392b-49aa-97fe-7d33c275df2a req-ffad56c8-1c11-4d7b-850b-ee0dc127ae92 service nova] Releasing lock "refresh_cache-0ab619ea-755b-4d71-9c12-0eeda0b42a39" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.593766] env[62204]: DEBUG nova.compute.manager [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 692.616880] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.038s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.617633] env[62204]: ERROR nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c81f162b-24a7-413f-b08a-8ba2e933db30, please check neutron logs for more information. 
[ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Traceback (most recent call last): [ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self.driver.spawn(context, instance, image_meta, [ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] vm_ref = self.build_virtual_machine(instance, [ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] vif_infos = vmwarevif.get_vif_info(self._session, [ 692.617633] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] for vif in network_info: [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] return self._sync_wrapper(fn, *args, **kwargs) [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self.wait() [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self[:] = self._gt.wait() [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] return self._exit_event.wait() [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] result = hub.switch() [ 692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
692.618045] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] return self.greenlet.switch() [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] result = function(*args, **kwargs) [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] return func(*args, **kwargs) [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] raise e [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] nwinfo = self.network_api.allocate_for_instance( [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] created_port_ids = self._update_ports_for_instance( [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] with excutils.save_and_reraise_exception(): [ 692.618452] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] self.force_reraise() [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] raise self.value [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] updated_port = self._update_port( [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] _ensure_no_port_binding_failure(port) [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] raise exception.PortBindingFailed(port_id=port['id']) [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] nova.exception.PortBindingFailed: Binding failed for port c81f162b-24a7-413f-b08a-8ba2e933db30, please check neutron logs for more information. [ 692.618862] env[62204]: ERROR nova.compute.manager [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] [ 692.619309] env[62204]: DEBUG nova.compute.utils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Binding failed for port c81f162b-24a7-413f-b08a-8ba2e933db30, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 692.621502] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 692.621718] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 692.621870] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.622440] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 692.622724] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.622856] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 692.623159] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 692.623430] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 692.623619] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 692.623796] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 692.623972] env[62204]: DEBUG nova.virt.hardware [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 692.624332] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.836s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 692.625865] env[62204]: INFO nova.compute.claims [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 692.629620] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302c6a37-5496-432f-b8f8-f592a45694c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.632933] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Build of instance 39d5f95c-7c98-4263-a46e-948d2e3d31ce was re-scheduled: Binding failed for port c81f162b-24a7-413f-b08a-8ba2e933db30, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 692.633361] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 692.633585] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Acquiring lock "refresh_cache-39d5f95c-7c98-4263-a46e-948d2e3d31ce" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 692.633728] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Acquired lock "refresh_cache-39d5f95c-7c98-4263-a46e-948d2e3d31ce" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.633883] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 692.646743] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf314141-715c-464b-bab2-a1f4a6611e5b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.650776] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199434, 'name': PowerOffVM_Task, 'duration_secs': 0.118553} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.651775] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 692.651999] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 692.653116] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0103dc6-acf6-4b98-892d-83e6f019c250 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.664397] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 692.669748] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Creating folder: Project (cea821650f2642e4b1265daf24d6605b). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 692.670619] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f526aafc-bd44-41e2-a2fb-30c46028c982 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.674371] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 692.674890] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4010d14b-35a0-4a99-a5f4-9fef235c4ad8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.682401] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Created folder: Project (cea821650f2642e4b1265daf24d6605b) in parent group-v259933. [ 692.682586] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Creating folder: Instances. Parent ref: group-v259954. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 692.682813] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c84138bb-9d2c-429e-b6a0-5fd4d9972c91 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.693066] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Created folder: Instances in parent group-v259954. [ 692.693298] env[62204]: DEBUG oslo.service.loopingcall [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 692.693492] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 692.693675] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fd36bd2-1a24-4b94-9192-08e492fbc3e6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.706124] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 692.706320] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 692.706487] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Deleting the datastore file [datastore1] ce74983e-8347-425c-967a-6a78a7daa701 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 692.707050] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09ab4e81-cb8f-4087-b5f7-86e287a38f3e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.712683] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 692.712683] env[62204]: value = "task-1199438" [ 692.712683] env[62204]: _type = "Task" [ 692.712683] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.713846] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 692.713846] env[62204]: value = "task-1199439" [ 692.713846] env[62204]: _type = "Task" [ 692.713846] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.723972] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199439, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.726913] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199438, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.151606] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 693.225400] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199438, 'name': CreateVM_Task, 'duration_secs': 0.242151} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.228231] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 693.228511] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09214} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.228865] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.229043] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.229364] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 693.229610] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 693.229779] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 693.229947] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.232595] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-235a9b46-8682-424a-ad3c-d0255ca9a695 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.237177] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 693.237177] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c3f46e-2871-53a9-6e10-773fca5b2c79" [ 693.237177] env[62204]: _type = "Task" [ 693.237177] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.237906] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.247276] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c3f46e-2871-53a9-6e10-773fca5b2c79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.740881] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Releasing lock "refresh_cache-39d5f95c-7c98-4263-a46e-948d2e3d31ce" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.740881] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 693.740881] env[62204]: DEBUG nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 693.740881] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 693.754034] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c3f46e-2871-53a9-6e10-773fca5b2c79, 'name': SearchDatastore_Task, 'duration_secs': 0.008462} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.754397] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 693.755027] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 693.755027] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 693.755027] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.755202] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 693.755438] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb2985e3-8f28-48f2-9414-1ad0e48cd95f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.761864] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 693.765166] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 693.765400] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 693.766106] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33e99a2a-6a62-45be-894c-172cb64243c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.772481] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 693.772481] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522e71e7-0d6a-6a7f-7e1f-228730f35d72" [ 693.772481] env[62204]: _type = "Task" [ 693.772481] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.781758] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522e71e7-0d6a-6a7f-7e1f-228730f35d72, 'name': SearchDatastore_Task, 'duration_secs': 0.007952} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.782482] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cde0e65-8151-47f8-b85f-7569038997a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.790300] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 693.790300] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520fcb88-8d67-292b-e67b-2f3323c0d493" [ 693.790300] env[62204]: _type = "Task" [ 693.790300] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.798016] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520fcb88-8d67-292b-e67b-2f3323c0d493, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.042763] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be27210-fb05-410d-b026-94706ec71e75 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.051898] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2067acf1-5df7-46f2-817a-8bcf22fc95b8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.084467] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6c4b55-5352-43f1-a49f-d2c3086d91b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.093195] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a07446b-2321-42f3-b523-a4bcf85917a9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.107744] env[62204]: DEBUG nova.compute.provider_tree [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 694.266498] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 694.266498] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 694.266863] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 694.266863] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 694.267154] 
env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 694.267336] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 694.267587] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 694.267777] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 694.267982] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 694.268200] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 694.268408] env[62204]: DEBUG nova.virt.hardware [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 694.268853] env[62204]: DEBUG nova.network.neutron [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.270707] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73a5838-69c5-40a3-9b6a-17d37ddebc8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.280442] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071a6049-27bc-49a6-8856-ff90d28b78b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.295894] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Instance VIF 
info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 694.301984] env[62204]: DEBUG oslo.service.loopingcall [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 694.305460] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 694.305704] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a0bb92d-cb3f-4a0c-8e35-86099fe7bd0c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.324071] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520fcb88-8d67-292b-e67b-2f3323c0d493, 'name': SearchDatastore_Task, 'duration_secs': 0.008118} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.325314] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.325600] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] bcb11a72-4394-42a2-9a9f-295adc1abcd0/bcb11a72-4394-42a2-9a9f-295adc1abcd0.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 694.325851] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 694.325851] env[62204]: value = "task-1199440" [ 694.325851] env[62204]: _type = "Task" [ 694.325851] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.326046] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c194ad22-93e7-4e86-89de-d2f00e386d67 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.338026] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199440, 'name': CreateVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.339051] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 694.339051] env[62204]: value = "task-1199441" [ 694.339051] env[62204]: _type = "Task" [ 694.339051] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.346379] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199441, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.610025] env[62204]: DEBUG nova.scheduler.client.report [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 694.775466] env[62204]: INFO nova.compute.manager [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] [instance: 39d5f95c-7c98-4263-a46e-948d2e3d31ce] Took 1.03 seconds to deallocate network for instance. [ 694.839398] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199440, 'name': CreateVM_Task, 'duration_secs': 0.294943} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.842401] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 694.843023] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.843023] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.843297] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 694.844344] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78396249-757c-49cb-95c8-aad1b21b09fc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.849450] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 
tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199441, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44523} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.850222] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] bcb11a72-4394-42a2-9a9f-295adc1abcd0/bcb11a72-4394-42a2-9a9f-295adc1abcd0.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 694.850456] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 694.850887] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bead7cba-9835-47f8-be5e-9e8effd29dd9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.853478] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 694.853478] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c69fbd-38a6-2f7f-f954-ac5a402221b4" [ 694.853478] env[62204]: _type = "Task" [ 694.853478] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.858149] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 694.858149] env[62204]: value = "task-1199442" [ 694.858149] env[62204]: _type = "Task" [ 694.858149] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.865059] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c69fbd-38a6-2f7f-f954-ac5a402221b4, 'name': SearchDatastore_Task, 'duration_secs': 0.008521} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.865713] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 694.865953] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 694.866219] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 694.866370] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.866582] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.869368] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57b88cd1-37d8-4ecf-b690-216bef255cea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.871645] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199442, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.876747] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 694.876917] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 694.877600] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fbdcdd2-e873-495a-95fc-a0e2992c201f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.882341] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 694.882341] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5257866c-c0b2-3a8a-9085-50cada4f669a" [ 694.882341] env[62204]: _type = "Task" [ 694.882341] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.889830] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5257866c-c0b2-3a8a-9085-50cada4f669a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.116356] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.492s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.116878] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 695.119682] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.965s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 695.367729] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199442, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054582} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.367989] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 695.368741] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c5cf3e-006f-4e9e-b118-0e806336fc3a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.387623] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] bcb11a72-4394-42a2-9a9f-295adc1abcd0/bcb11a72-4394-42a2-9a9f-295adc1abcd0.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 695.388225] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab009f7a-1a01-4537-91e8-c036536d2972 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.412046] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5257866c-c0b2-3a8a-9085-50cada4f669a, 'name': SearchDatastore_Task, 'duration_secs': 0.007503} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.413191] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 695.413191] env[62204]: value = "task-1199443" [ 695.413191] env[62204]: _type = "Task" [ 695.413191] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.413386] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35718b01-7a14-4a44-809a-c5a117836719 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.420733] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 695.420733] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521be70e-249f-9be4-f540-8e3c41fe467d" [ 695.420733] env[62204]: _type = "Task" [ 695.420733] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.424097] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199443, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.431942] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521be70e-249f-9be4-f540-8e3c41fe467d, 'name': SearchDatastore_Task, 'duration_secs': 0.008255} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.432198] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 695.432449] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] ce74983e-8347-425c-967a-6a78a7daa701/ce74983e-8347-425c-967a-6a78a7daa701.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.432711] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13b7e047-a9ab-452e-b81f-5cc36e466ed2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.439028] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 695.439028] env[62204]: value = "task-1199444" [ 695.439028] env[62204]: _type = "Task" [ 695.439028] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.445711] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199444, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.624594] env[62204]: DEBUG nova.compute.utils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 695.626133] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 695.626308] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 695.669428] env[62204]: DEBUG nova.policy [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdfefffe65324c74901cde7f1fe25796', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6570f5ed2a4c40628e884d6ef0e9491b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 695.802192] env[62204]: INFO nova.scheduler.client.report [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Deleted allocations for instance 39d5f95c-7c98-4263-a46e-948d2e3d31ce [ 695.925415] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199443, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.952920] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199444, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.971346] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Successfully created port: c8168e4a-cf0f-419f-b869-ee73a1cf8aba {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.079256] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16da0535-9c93-4457-b7ce-1b8ce3878c44 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.088612] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79bdc1ca-8123-4ca2-a636-44c48b1e21d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.129155] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269d5871-827d-44b5-89d0-d01ca3abc5d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.132614] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 696.146311] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55d2f9b-a0a9-4c06-98b4-215d0ea36f87 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.160584] env[62204]: DEBUG nova.compute.provider_tree [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.313142] env[62204]: DEBUG oslo_concurrency.lockutils [None req-14f0dcdb-e0db-453f-8827-295d3df9dc12 tempest-ServersTestJSON-213525295 tempest-ServersTestJSON-213525295-project-member] Lock "39d5f95c-7c98-4263-a46e-948d2e3d31ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 143.436s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.427056] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199443, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.447498] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199444, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.883951} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.447759] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] ce74983e-8347-425c-967a-6a78a7daa701/ce74983e-8347-425c-967a-6a78a7daa701.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 696.447971] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 696.448222] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c6b8c32-1470-430f-843e-d32c21f77b18 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.457664] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 696.457664] env[62204]: value = "task-1199445" [ 696.457664] env[62204]: _type = "Task" [ 696.457664] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.466894] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199445, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.665569] env[62204]: DEBUG nova.scheduler.client.report [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 696.714274] env[62204]: DEBUG nova.compute.manager [req-94f7556d-67ab-47e1-aef8-34ce442a2b3a req-a3386ca1-2027-41be-9217-751bafd4d434 service nova] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Received event network-changed-c8168e4a-cf0f-419f-b869-ee73a1cf8aba {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 696.714274] env[62204]: DEBUG nova.compute.manager [req-94f7556d-67ab-47e1-aef8-34ce442a2b3a req-a3386ca1-2027-41be-9217-751bafd4d434 service nova] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Refreshing instance network info cache due to event network-changed-c8168e4a-cf0f-419f-b869-ee73a1cf8aba. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 696.714274] env[62204]: DEBUG oslo_concurrency.lockutils [req-94f7556d-67ab-47e1-aef8-34ce442a2b3a req-a3386ca1-2027-41be-9217-751bafd4d434 service nova] Acquiring lock "refresh_cache-6665383b-f5fd-4fdf-b625-86cfb0869419" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.714274] env[62204]: DEBUG oslo_concurrency.lockutils [req-94f7556d-67ab-47e1-aef8-34ce442a2b3a req-a3386ca1-2027-41be-9217-751bafd4d434 service nova] Acquired lock "refresh_cache-6665383b-f5fd-4fdf-b625-86cfb0869419" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.714274] env[62204]: DEBUG nova.network.neutron [req-94f7556d-67ab-47e1-aef8-34ce442a2b3a req-a3386ca1-2027-41be-9217-751bafd4d434 service nova] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Refreshing network info cache for port c8168e4a-cf0f-419f-b869-ee73a1cf8aba {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 696.820014] env[62204]: DEBUG nova.compute.manager [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 696.927593] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199443, 'name': ReconfigVM_Task, 'duration_secs': 1.033931} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.927876] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Reconfigured VM instance instance-00000026 to attach disk [datastore2] bcb11a72-4394-42a2-9a9f-295adc1abcd0/bcb11a72-4394-42a2-9a9f-295adc1abcd0.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 696.928525] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14b0280b-e9c2-4695-96ce-2c2f9c47061d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.935439] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 696.935439] env[62204]: value = "task-1199446" [ 696.935439] env[62204]: _type = "Task" [ 696.935439] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.943476] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199446, 'name': Rename_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.967929] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199445, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069225} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.967929] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 696.968141] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c400bdae-a2dc-4cc0-9f73-67a5da3d869d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.990037] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] ce74983e-8347-425c-967a-6a78a7daa701/ce74983e-8347-425c-967a-6a78a7daa701.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 696.991122] env[62204]: ERROR nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c8168e4a-cf0f-419f-b869-ee73a1cf8aba, please check neutron logs for more information. 
[ 696.991122] env[62204]: ERROR nova.compute.manager Traceback (most recent call last): [ 696.991122] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 696.991122] env[62204]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 696.991122] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 696.991122] env[62204]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 696.991122] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 696.991122] env[62204]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 696.991122] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 696.991122] env[62204]: ERROR nova.compute.manager self.force_reraise() [ 696.991122] env[62204]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 696.991122] env[62204]: ERROR nova.compute.manager raise self.value [ 696.991122] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 696.991122] env[62204]: ERROR nova.compute.manager updated_port = self._update_port( [ 696.991122] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 696.991122] env[62204]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 696.991589] env[62204]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 696.991589] env[62204]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 696.991589] env[62204]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c8168e4a-cf0f-419f-b869-ee73a1cf8aba, please check neutron logs for more information. 
[ 696.991589] env[62204]: ERROR nova.compute.manager [ 696.991589] env[62204]: Traceback (most recent call last): [ 696.991589] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 696.991589] env[62204]: listener.cb(fileno) [ 696.991589] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 696.991589] env[62204]: result = function(*args, **kwargs) [ 696.991589] env[62204]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 696.991589] env[62204]: return func(*args, **kwargs) [ 696.991589] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 696.991589] env[62204]: raise e [ 696.991589] env[62204]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 696.991589] env[62204]: nwinfo = self.network_api.allocate_for_instance( [ 696.991589] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 696.991589] env[62204]: created_port_ids = self._update_ports_for_instance( [ 696.991589] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 696.991589] env[62204]: with excutils.save_and_reraise_exception(): [ 696.991589] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 696.991589] env[62204]: self.force_reraise() [ 696.991589] env[62204]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 696.991589] env[62204]: raise self.value [ 696.991589] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 696.991589] env[62204]: updated_port = self._update_port( [ 696.991589] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 696.991589] env[62204]: _ensure_no_port_binding_failure(port) [ 696.991589] env[62204]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 696.991589] env[62204]: raise exception.PortBindingFailed(port_id=port['id']) [ 696.993076] env[62204]: nova.exception.PortBindingFailed: Binding failed for port c8168e4a-cf0f-419f-b869-ee73a1cf8aba, please check neutron logs for more information. [ 696.993076] env[62204]: Removing descriptor: 16 [ 696.993076] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8369d24b-477e-4a31-8c3f-23f3e755aa0e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.011447] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 697.011447] env[62204]: value = "task-1199447" [ 697.011447] env[62204]: _type = "Task" [ 697.011447] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.019415] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199447, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.143920] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 697.168477] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 697.169268] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 697.169268] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.169268] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 697.169268] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.169268] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 697.169491] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 697.169573] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 697.169736] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 697.169903] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 697.170102] env[62204]: DEBUG nova.virt.hardware [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 697.170907] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df45d1ff-7ccb-4bb1-96b4-209bf8d0c8ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.174215] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.054s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.174893] env[62204]: ERROR nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 071a4dbb-37dd-44af-a177-5dd45b46109a, please check neutron logs for more information. 
[ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Traceback (most recent call last): [ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self.driver.spawn(context, instance, image_meta, [ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] vm_ref = self.build_virtual_machine(instance, [ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.174893] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] for vif in network_info: [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] return self._sync_wrapper(fn, *args, **kwargs) [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self.wait() [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self[:] = self._gt.wait() [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] return self._exit_event.wait() [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] result = hub.switch() [ 697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
697.175287] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] return self.greenlet.switch() [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] result = function(*args, **kwargs) [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] return func(*args, **kwargs) [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] raise e [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] nwinfo = self.network_api.allocate_for_instance( [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] created_port_ids = self._update_ports_for_instance( [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] with excutils.save_and_reraise_exception(): [ 697.175860] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] self.force_reraise() [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] raise self.value [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] updated_port = self._update_port( [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] _ensure_no_port_binding_failure(port) [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] raise exception.PortBindingFailed(port_id=port['id']) [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] nova.exception.PortBindingFailed: Binding failed for port 071a4dbb-37dd-44af-a177-5dd45b46109a, please check neutron logs for more information. [ 697.176242] env[62204]: ERROR nova.compute.manager [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] [ 697.176538] env[62204]: DEBUG nova.compute.utils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Binding failed for port 071a4dbb-37dd-44af-a177-5dd45b46109a, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.177115] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.480s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.180416] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Build of instance 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463 was re-scheduled: Binding failed for port 071a4dbb-37dd-44af-a177-5dd45b46109a, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 697.180872] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 697.181179] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Acquiring lock "refresh_cache-7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.181179] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Acquired lock "refresh_cache-7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.181364] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 697.185669] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db9f9982-560f-4bb0-96dc-d62edd944c55 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.201389] env[62204]: ERROR nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c8168e4a-cf0f-419f-b869-ee73a1cf8aba, please check neutron logs for more information. 
[ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Traceback (most recent call last): [ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] yield resources [ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self.driver.spawn(context, instance, image_meta, [ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] vm_ref = self.build_virtual_machine(instance, [ 697.201389] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] for vif in network_info: [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] return self._sync_wrapper(fn, *args, **kwargs) [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self.wait() [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self[:] = self._gt.wait() [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] return self._exit_event.wait() [ 697.201938] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 697.201938] env[62204]: ERROR 
nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] current.throw(*self._exc) [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] result = function(*args, **kwargs) [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] return func(*args, **kwargs) [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] raise e [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] nwinfo = self.network_api.allocate_for_instance( [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] created_port_ids = self._update_ports_for_instance( [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] with excutils.save_and_reraise_exception(): [ 697.202323] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self.force_reraise() [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] raise self.value [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] updated_port = self._update_port( [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] _ensure_no_port_binding_failure(port) [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] raise exception.PortBindingFailed(port_id=port['id']) [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] nova.exception.PortBindingFailed: Binding failed for port c8168e4a-cf0f-419f-b869-ee73a1cf8aba, please check neutron logs for more information. [ 697.202694] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] [ 697.202694] env[62204]: INFO nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Terminating instance [ 697.203679] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "refresh_cache-6665383b-f5fd-4fdf-b625-86cfb0869419" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.235502] env[62204]: DEBUG nova.network.neutron [req-94f7556d-67ab-47e1-aef8-34ce442a2b3a req-a3386ca1-2027-41be-9217-751bafd4d434 service nova] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 697.340218] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.365733] env[62204]: DEBUG nova.network.neutron [req-94f7556d-67ab-47e1-aef8-34ce442a2b3a req-a3386ca1-2027-41be-9217-751bafd4d434 service nova] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.446057] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199446, 'name': Rename_Task, 'duration_secs': 0.132917} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.446478] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 697.446683] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffee6048-26ff-4d66-a06f-a6a748b3a77e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.453406] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 697.453406] env[62204]: value = "task-1199448" [ 697.453406] env[62204]: _type = "Task" [ 697.453406] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.460908] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199448, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.521426] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199447, 'name': ReconfigVM_Task, 'duration_secs': 0.27576} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.521677] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Reconfigured VM instance instance-00000024 to attach disk [datastore2] ce74983e-8347-425c-967a-6a78a7daa701/ce74983e-8347-425c-967a-6a78a7daa701.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 697.522355] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82405780-6283-4f7a-839f-530aabd08808 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.529180] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 697.529180] env[62204]: value = "task-1199449" [ 697.529180] env[62204]: _type = "Task" [ 697.529180] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.537602] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199449, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.716290] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 697.846054] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.869438] env[62204]: DEBUG oslo_concurrency.lockutils [req-94f7556d-67ab-47e1-aef8-34ce442a2b3a req-a3386ca1-2027-41be-9217-751bafd4d434 service nova] Releasing lock "refresh_cache-6665383b-f5fd-4fdf-b625-86cfb0869419" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.872621] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired lock "refresh_cache-6665383b-f5fd-4fdf-b625-86cfb0869419" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.872984] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 697.968959] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199448, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.042023] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199449, 'name': Rename_Task, 'duration_secs': 0.13369} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.042023] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 698.042023] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f34dbd3-8047-49d3-b816-63b8892d1442 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.049022] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 698.049022] env[62204]: value = "task-1199450" [ 698.049022] env[62204]: _type = "Task" [ 698.049022] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.061803] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199450, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.178343] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b323c28-5d37-46ca-8f10-0f81ad4989fc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.186868] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8089a3c5-1c21-470f-b478-34aba00a6f11 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.220013] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece3cda4-a117-4f9b-a864-0f4a89216d83 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.227706] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019ffb5f-465a-452b-a0e1-5259ef054cc5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.246192] env[62204]: DEBUG nova.compute.provider_tree [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.358479] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Releasing lock "refresh_cache-7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.358722] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 
tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 698.358916] env[62204]: DEBUG nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 698.359105] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 698.398797] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.400787] env[62204]: DEBUG nova.network.neutron [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.473016] env[62204]: DEBUG oslo_vmware.api [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199448, 'name': PowerOnVM_Task, 'duration_secs': 0.555525} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.473016] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 698.473016] env[62204]: INFO nova.compute.manager [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Took 5.88 seconds to spawn the instance on the hypervisor. 
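The Rename_Task and PowerOnVM_Task entries above follow vCenter's asynchronous-task pattern: the operation returns a task handle, the caller polls it (progress 0% ... 94%) until it completes, and the elapsed time is recorded as duration_secs. A minimal, self-contained sketch of such a polling loop; the get_task_state callable and TaskFailed class are illustrative stand-ins, not oslo.vmware's real API:

    import time

    class TaskFailed(Exception):
        """Illustrative stand-in for a task error; not an oslo.vmware class."""

    def wait_for_task(get_task_state, task_id, poll_interval=0.5, timeout=300):
        """Poll a task until it finishes, mirroring the progress lines above.

        get_task_state(task_id) is assumed to return a dict such as
        {'state': 'running', 'progress': 94}, {'state': 'success'} or
        {'state': 'error', 'error': '...'}.
        """
        started = time.monotonic()
        while True:
            info = get_task_state(task_id)
            if info['state'] == 'success':
                return time.monotonic() - started   # reported as duration_secs
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            if time.monotonic() - started > timeout:
                raise TaskFailed('timed out waiting for %s' % task_id)
            # e.g. "Task: {'id': task-1199448, 'name': PowerOnVM_Task} progress is 94%."
            print("Task %s progress is %s%%." % (task_id, info.get('progress', 0)))
            time.sleep(poll_interval)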
[ 698.473016] env[62204]: DEBUG nova.compute.manager [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 698.473016] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7604ebb-4d07-4ec6-ae60-cb4eee087bab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.538053] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.559634] env[62204]: DEBUG oslo_vmware.api [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199450, 'name': PowerOnVM_Task, 'duration_secs': 0.43272} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.559906] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 698.561592] env[62204]: DEBUG nova.compute.manager [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 698.563042] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b3784e-b6d3-4f96-8278-c49f7cfc3388 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.756198] env[62204]: DEBUG nova.scheduler.client.report [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 698.841528] env[62204]: DEBUG nova.compute.manager [req-452a8ea5-6566-43fe-b04e-c03442a84775 req-40eb0d37-4a45-4fdf-943b-4870767ee906 service nova] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Received event network-vif-deleted-c8168e4a-cf0f-419f-b869-ee73a1cf8aba {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 698.904844] env[62204]: DEBUG nova.network.neutron [None 
req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.995156] env[62204]: INFO nova.compute.manager [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Took 37.97 seconds to build instance. [ 699.041107] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Releasing lock "refresh_cache-6665383b-f5fd-4fdf-b625-86cfb0869419" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.041513] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 699.041698] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 699.041977] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a1fba12d-2c2b-4e58-b97f-d8ac8749ea5e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.051319] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01bd0db1-5f8b-4ae7-bc28-746d3d9361e9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.081025] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6665383b-f5fd-4fdf-b625-86cfb0869419 could not be found. [ 699.081025] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.081025] env[62204]: INFO nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 699.081025] env[62204]: DEBUG oslo.service.loopingcall [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 699.081025] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.081445] env[62204]: DEBUG nova.compute.manager [-] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 699.081445] env[62204]: DEBUG nova.network.neutron [-] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 699.097656] env[62204]: DEBUG nova.network.neutron [-] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.261746] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.085s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.262457] env[62204]: ERROR nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a6a5b112-dcdd-4022-98e5-972f84a5fd31, please check neutron logs for more information. 
[ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Traceback (most recent call last): [ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self.driver.spawn(context, instance, image_meta, [ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self._vmops.spawn(context, instance, image_meta, injected_files, [ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] vm_ref = self.build_virtual_machine(instance, [ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] vif_infos = vmwarevif.get_vif_info(self._session, [ 699.262457] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] for vif in network_info: [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] return self._sync_wrapper(fn, *args, **kwargs) [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self.wait() [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self[:] = self._gt.wait() [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] return self._exit_event.wait() [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] result = hub.switch() [ 699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
699.262828] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] return self.greenlet.switch() [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] result = function(*args, **kwargs) [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] return func(*args, **kwargs) [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] raise e [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] nwinfo = self.network_api.allocate_for_instance( [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] created_port_ids = self._update_ports_for_instance( [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] with excutils.save_and_reraise_exception(): [ 699.263257] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] self.force_reraise() [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] raise self.value [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] updated_port = self._update_port( [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] _ensure_no_port_binding_failure(port) [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] raise exception.PortBindingFailed(port_id=port['id']) [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] nova.exception.PortBindingFailed: Binding failed for port a6a5b112-dcdd-4022-98e5-972f84a5fd31, please check neutron logs for more information. [ 699.263609] env[62204]: ERROR nova.compute.manager [instance: 7cccaaf2-f17d-426d-9340-e33260235706] [ 699.264014] env[62204]: DEBUG nova.compute.utils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Binding failed for port a6a5b112-dcdd-4022-98e5-972f84a5fd31, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 699.264374] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.490s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.265932] env[62204]: INFO nova.compute.claims [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.268495] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Build of instance 7cccaaf2-f17d-426d-9340-e33260235706 was re-scheduled: Binding failed for port a6a5b112-dcdd-4022-98e5-972f84a5fd31, please check neutron logs for more information. 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 699.269433] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 699.269433] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Acquiring lock "refresh_cache-7cccaaf2-f17d-426d-9340-e33260235706" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.269433] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Acquired lock "refresh_cache-7cccaaf2-f17d-426d-9340-e33260235706" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.269594] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 699.336792] env[62204]: INFO nova.compute.manager [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Rebuilding instance [ 699.397223] env[62204]: DEBUG nova.compute.manager [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 699.398601] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013eef12-15b4-49c2-98a2-e377d4275c7f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.408429] env[62204]: INFO nova.compute.manager [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] [instance: 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463] Took 1.05 seconds to deallocate network for instance. 
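The PortBindingFailed traceback above bottoms out in a check that inspects the port returned by Neutron: when binding:vif_type comes back as 'binding_failed', Nova raises and the build is re-scheduled. A self-contained sketch of that style of check, with a local exception class standing in for Nova's actual module:

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reports that binding this port failed.

        'port' is the dict returned by the Neutron API, e.g.
        {'id': 'a6a5b112-...', 'binding:vif_type': 'binding_failed', ...}.
        """
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

When a check like this fires during the build, the compute manager aborts the resource claim and re-schedules the instance, which is the sequence the entries above record for ports c8168e4a and a6a5b112.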
[ 699.496349] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1c679448-99cb-423e-9723-fb9e56528508 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "bcb11a72-4394-42a2-9a9f-295adc1abcd0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.598s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.600335] env[62204]: DEBUG nova.network.neutron [-] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.709718] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "ce74983e-8347-425c-967a-6a78a7daa701" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.710199] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "ce74983e-8347-425c-967a-6a78a7daa701" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.710199] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "ce74983e-8347-425c-967a-6a78a7daa701-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 699.710379] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "ce74983e-8347-425c-967a-6a78a7daa701-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.710532] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "ce74983e-8347-425c-967a-6a78a7daa701-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 699.712954] env[62204]: INFO nova.compute.manager [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Terminating instance [ 699.714322] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "refresh_cache-ce74983e-8347-425c-967a-6a78a7daa701" {{(pid=62204) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 699.714479] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired lock "refresh_cache-ce74983e-8347-425c-967a-6a78a7daa701" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.714652] env[62204]: DEBUG nova.network.neutron [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 699.797036] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.911999] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.914320] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 699.918649] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd6b4322-35c9-435d-a4cc-7a5f7747eb2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.925726] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 699.925726] env[62204]: value = "task-1199451" [ 699.925726] env[62204]: _type = "Task" [ 699.925726] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.936715] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199451, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.998900] env[62204]: DEBUG nova.compute.manager [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 700.103955] env[62204]: INFO nova.compute.manager [-] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Took 1.02 seconds to deallocate network for instance. [ 700.105322] env[62204]: DEBUG nova.compute.claims [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Aborting claim: {{(pid=62204) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 700.106272] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.230853] env[62204]: DEBUG nova.network.neutron [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 700.284508] env[62204]: DEBUG nova.network.neutron [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.418963] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Releasing lock "refresh_cache-7cccaaf2-f17d-426d-9340-e33260235706" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.419116] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 700.419307] env[62204]: DEBUG nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 700.419470] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 700.438642] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199451, 'name': PowerOffVM_Task, 'duration_secs': 0.135829} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.439912] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 700.440742] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 700.440950] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 700.444968] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654122dd-508a-4b2d-9f69-a1e4f9df4833 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.449137] env[62204]: INFO nova.scheduler.client.report [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Deleted allocations for instance 7cbaf88d-f7fb-4fbe-bca0-2e83a3341463 [ 700.463261] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 700.463631] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-120536dc-e5f4-4f71-a7e1-4e39db819f1f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.487881] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None 
req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 700.488129] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 700.488330] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Deleting the datastore file [datastore2] bcb11a72-4394-42a2-9a9f-295adc1abcd0 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 700.488575] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0bfa90f6-268f-41a9-95b4-16f3a04c49f7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.499077] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 700.499077] env[62204]: value = "task-1199453" [ 700.499077] env[62204]: _type = "Task" [ 700.499077] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.508043] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199453, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.527746] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.767156] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9098d2-dc29-4c5b-8e94-d7d5f454d28a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.774896] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b3a6ec-85d8-4c18-a82b-c3031b03c265 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.816781] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Releasing lock "refresh_cache-ce74983e-8347-425c-967a-6a78a7daa701" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.817238] env[62204]: DEBUG nova.compute.manager [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 700.817426] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 700.818432] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61bbff5-33ad-4aad-a540-47a5bb4a8076 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.821773] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba67aa2b-b1c1-46fe-9b69-6c65443e37ad {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.832346] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03870c50-14b2-49d9-ac5b-f6eabe5c605b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.836263] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 700.836496] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-025dc666-51c2-4667-b1be-022b7d32cd50 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.850241] env[62204]: DEBUG nova.compute.provider_tree [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.856411] env[62204]: DEBUG oslo_vmware.api [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 700.856411] env[62204]: value = "task-1199454" [ 700.856411] env[62204]: _type = "Task" [ 700.856411] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.861528] env[62204]: DEBUG oslo_vmware.api [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199454, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.949596] env[62204]: DEBUG nova.network.neutron [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.961149] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b977a8a2-0ab2-4602-878e-8204a7733ee8 tempest-ServerAddressesNegativeTestJSON-1269477952 tempest-ServerAddressesNegativeTestJSON-1269477952-project-member] Lock "7cbaf88d-f7fb-4fbe-bca0-2e83a3341463" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.031s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.009692] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096173} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.010066] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 701.010271] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 701.010450] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 701.354928] env[62204]: DEBUG nova.scheduler.client.report [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 701.368951] env[62204]: DEBUG oslo_vmware.api [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199454, 'name': PowerOffVM_Task, 'duration_secs': 0.150068} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.369146] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 701.370599] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 701.370599] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7da6d0ce-14e4-46c5-9334-d52ce5f36050 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.397307] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 701.397579] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 701.397719] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Deleting the datastore file [datastore2] ce74983e-8347-425c-967a-6a78a7daa701 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 701.398246] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d2a888c-a86f-4816-890d-039e5aee6f8a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.403559] env[62204]: DEBUG oslo_vmware.api [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 701.403559] env[62204]: value = "task-1199456" [ 701.403559] env[62204]: _type = "Task" [ 701.403559] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.411402] env[62204]: DEBUG oslo_vmware.api [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.452280] env[62204]: INFO nova.compute.manager [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] [instance: 7cccaaf2-f17d-426d-9340-e33260235706] Took 1.03 seconds to deallocate network for instance. 
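The "Inventory has not changed for provider ..." entries above include the full inventory payload per resource class. Placement derives the schedulable capacity of each class as (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations; a short worked computation over the payload logged above:

    # Inventory as logged for provider 92e8f362-5134-40c6-9a5c-0b8f64197972.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} schedulable")
    # -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400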
[ 701.463544] env[62204]: DEBUG nova.compute.manager [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 701.864900] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.865750] env[62204]: DEBUG nova.compute.manager [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 701.870153] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.154s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.872688] env[62204]: INFO nova.compute.claims [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.922839] env[62204]: DEBUG oslo_vmware.api [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097891} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.924116] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 701.924313] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 701.924580] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 701.924696] env[62204]: INFO nova.compute.manager [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Took 1.11 seconds to destroy the instance on the hypervisor. [ 701.925338] env[62204]: DEBUG oslo.service.loopingcall [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 701.925338] env[62204]: DEBUG nova.compute.manager [-] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 701.925338] env[62204]: DEBUG nova.network.neutron [-] [instance: ce74983e-8347-425c-967a-6a78a7daa701] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 701.947660] env[62204]: DEBUG nova.network.neutron [-] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 702.000607] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.054372] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 702.054627] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 702.054828] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.055064] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 702.055170] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.055314] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 702.055518] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
702.055674] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 702.055843] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 702.056017] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 702.056187] env[62204]: DEBUG nova.virt.hardware [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 702.057699] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179e3b56-460e-4dde-8304-a0645d7f3714 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.066298] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daaa75d3-ebd2-4572-bac9-bd30a2a901e9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.080498] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 702.086332] env[62204]: DEBUG oslo.service.loopingcall [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 702.086674] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 702.086897] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4ebab5a-9ba4-4c6d-a1f3-54ff55a79dea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.104185] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 702.104185] env[62204]: value = "task-1199457" [ 702.104185] env[62204]: _type = "Task" [ 702.104185] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.114883] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199457, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.380289] env[62204]: DEBUG nova.compute.utils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 702.384212] env[62204]: DEBUG nova.compute.manager [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 702.384395] env[62204]: DEBUG nova.network.neutron [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 702.449943] env[62204]: DEBUG nova.network.neutron [-] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.488844] env[62204]: DEBUG nova.policy [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdfefffe65324c74901cde7f1fe25796', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6570f5ed2a4c40628e884d6ef0e9491b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 702.496775] env[62204]: INFO nova.scheduler.client.report [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Deleted allocations for instance 7cccaaf2-f17d-426d-9340-e33260235706 [ 702.616859] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199457, 'name': CreateVM_Task, 'duration_secs': 0.294984} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.616859] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 702.616859] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.616859] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.618065] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 702.618065] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1937501d-f656-4cb0-adb4-0eb9bad48220 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.623519] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 702.623519] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d93db8-f6fc-7ff9-dce3-9a1f26b78f45" [ 702.623519] env[62204]: _type = "Task" [ 702.623519] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.631621] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d93db8-f6fc-7ff9-dce3-9a1f26b78f45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.888775] env[62204]: DEBUG nova.compute.manager [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 702.922179] env[62204]: DEBUG nova.network.neutron [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Successfully created port: b93d9c4c-717c-4679-bcb8-b49b9517e2b8 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 702.953022] env[62204]: INFO nova.compute.manager [-] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Took 1.03 seconds to deallocate network for instance. [ 703.005133] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4000e3b1-c773-4b94-a33b-e3e46d3e5ea8 tempest-AttachInterfacesV270Test-2085049426 tempest-AttachInterfacesV270Test-2085049426-project-member] Lock "7cccaaf2-f17d-426d-9340-e33260235706" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 148.825s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.138843] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d93db8-f6fc-7ff9-dce3-9a1f26b78f45, 'name': SearchDatastore_Task, 'duration_secs': 0.009863} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.139943] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.140208] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 703.140437] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.140576] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.140742] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 703.141061] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09b328b3-8d6f-4c24-a2aa-83c55c688c8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.153150] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 703.153736] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 703.154087] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b637720-dc62-4168-920d-a3dce9309b04 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.160389] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 703.160389] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525565eb-a3ad-b079-4c9e-f9686b51a327" [ 703.160389] env[62204]: _type = "Task" [ 703.160389] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.169272] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525565eb-a3ad-b079-4c9e-f9686b51a327, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.450103] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91daf4b6-1af7-4fb1-88d9-60388cea369e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.460028] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c09107f-55ca-434d-8303-3120f41dce9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.465307] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.492248] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e158999c-c557-42a9-bd39-2872811733eb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.499531] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe9079f-6ffd-4d99-ac4a-c6993141e62c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.513147] env[62204]: DEBUG nova.compute.provider_tree [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.515217] env[62204]: DEBUG nova.compute.manager [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 703.670785] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525565eb-a3ad-b079-4c9e-f9686b51a327, 'name': SearchDatastore_Task, 'duration_secs': 0.008705} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.672330] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4139e15-5483-4086-9f20-5fd5eebe07be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.677226] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 703.677226] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b334d3-0493-a67b-2a26-58d8a8829f13" [ 703.677226] env[62204]: _type = "Task" [ 703.677226] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.686653] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b334d3-0493-a67b-2a26-58d8a8829f13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.918186] env[62204]: DEBUG nova.compute.manager [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 703.946176] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 703.946641] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 703.946969] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 703.947700] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 703.947906] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 703.948453] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 703.948693] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 703.949297] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 703.949874] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 703.950111] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 703.950819] env[62204]: DEBUG nova.virt.hardware [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 703.951747] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b4e054-f775-4713-b866-acf4fb3c2e63 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.961240] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6c679c-0ada-409d-8936-a3ff32fddaf8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.021020] env[62204]: DEBUG nova.scheduler.client.report [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 704.047665] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.192386] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b334d3-0493-a67b-2a26-58d8a8829f13, 'name': SearchDatastore_Task, 'duration_secs': 0.009542} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.192711] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.193070] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] bcb11a72-4394-42a2-9a9f-295adc1abcd0/bcb11a72-4394-42a2-9a9f-295adc1abcd0.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 704.193384] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5583abe0-298f-4eb9-94da-d2a15d98c9a5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.202280] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 704.202280] env[62204]: value = "task-1199458" [ 704.202280] env[62204]: _type = "Task" [ 704.202280] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.213667] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199458, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.527503] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.529029] env[62204]: DEBUG nova.compute.manager [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 704.533017] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.371s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.536030] env[62204]: INFO nova.compute.claims [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.615203] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.615203] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.713174] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199458, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.851083] env[62204]: DEBUG nova.compute.manager [req-0e7769d9-ef14-48c6-a394-08dba8bf5aa1 req-1010a6a9-8fa4-44c6-bc2c-652a2b35b85e service nova] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Received event network-vif-plugged-b93d9c4c-717c-4679-bcb8-b49b9517e2b8 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 704.851307] env[62204]: DEBUG oslo_concurrency.lockutils [req-0e7769d9-ef14-48c6-a394-08dba8bf5aa1 req-1010a6a9-8fa4-44c6-bc2c-652a2b35b85e service nova] Acquiring lock "432115aa-8999-40fe-a0cb-31433575c912-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.851516] env[62204]: DEBUG oslo_concurrency.lockutils [req-0e7769d9-ef14-48c6-a394-08dba8bf5aa1 req-1010a6a9-8fa4-44c6-bc2c-652a2b35b85e service nova] Lock "432115aa-8999-40fe-a0cb-31433575c912-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.851679] env[62204]: DEBUG oslo_concurrency.lockutils [req-0e7769d9-ef14-48c6-a394-08dba8bf5aa1 req-1010a6a9-8fa4-44c6-bc2c-652a2b35b85e service nova] Lock "432115aa-8999-40fe-a0cb-31433575c912-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.854428] env[62204]: DEBUG nova.compute.manager [req-0e7769d9-ef14-48c6-a394-08dba8bf5aa1 req-1010a6a9-8fa4-44c6-bc2c-652a2b35b85e service nova] [instance: 432115aa-8999-40fe-a0cb-31433575c912] No waiting events found dispatching network-vif-plugged-b93d9c4c-717c-4679-bcb8-b49b9517e2b8 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 704.854428] env[62204]: WARNING nova.compute.manager [req-0e7769d9-ef14-48c6-a394-08dba8bf5aa1 req-1010a6a9-8fa4-44c6-bc2c-652a2b35b85e service nova] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Received unexpected event network-vif-plugged-b93d9c4c-717c-4679-bcb8-b49b9517e2b8 for instance with vm_state building and task_state spawning. [ 704.984667] env[62204]: DEBUG nova.network.neutron [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Successfully updated port: b93d9c4c-717c-4679-bcb8-b49b9517e2b8 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 705.039530] env[62204]: DEBUG nova.compute.utils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 705.043432] env[62204]: DEBUG nova.compute.manager [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 705.043597] env[62204]: DEBUG nova.network.neutron [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 705.121884] env[62204]: DEBUG nova.policy [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bdfefffe65324c74901cde7f1fe25796', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6570f5ed2a4c40628e884d6ef0e9491b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 705.213465] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199458, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515511} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.213796] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] bcb11a72-4394-42a2-9a9f-295adc1abcd0/bcb11a72-4394-42a2-9a9f-295adc1abcd0.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 705.213935] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 705.214480] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4325eb64-971e-49fe-91be-44774a6452e4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.221505] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 705.221505] env[62204]: value = "task-1199459" [ 705.221505] env[62204]: _type = "Task" [ 705.221505] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.231599] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199459, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.492528] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "refresh_cache-432115aa-8999-40fe-a0cb-31433575c912" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.492757] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired lock "refresh_cache-432115aa-8999-40fe-a0cb-31433575c912" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.492947] env[62204]: DEBUG nova.network.neutron [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 705.545454] env[62204]: DEBUG nova.compute.manager [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 705.734648] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199459, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066478} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.734964] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 705.735739] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7562ed05-acbf-4ca0-a66b-f9af922f4a1a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.756335] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] bcb11a72-4394-42a2-9a9f-295adc1abcd0/bcb11a72-4394-42a2-9a9f-295adc1abcd0.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 705.759196] env[62204]: DEBUG nova.network.neutron [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Successfully created port: ba160e15-24d0-4e35-af63-89849f63afca {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.760872] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0917ff9-11e2-4287-b679-9ea4c27a2402 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.781294] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 705.781294] env[62204]: value = "task-1199460" [ 705.781294] env[62204]: _type = "Task" [ 705.781294] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.792499] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199460, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.042563] env[62204]: DEBUG nova.network.neutron [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.107722] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e736fd5f-fa09-4012-a0df-d7d3bf9ba6ce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.115678] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf618d6a-fc0f-49f7-9a33-836925636cb8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.162035] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db34432f-0888-4efa-a226-362a9242d7e0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.170414] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d8cd20-b709-40a4-a68a-cb627aff4ff9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.184528] env[62204]: DEBUG nova.compute.provider_tree [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.291041] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199460, 'name': ReconfigVM_Task, 'duration_secs': 0.301916} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.291324] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Reconfigured VM instance instance-00000026 to attach disk [datastore1] bcb11a72-4394-42a2-9a9f-295adc1abcd0/bcb11a72-4394-42a2-9a9f-295adc1abcd0.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 706.291945] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-caee33f5-c7c7-4866-ac54-00f2016c44e6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.295050] env[62204]: DEBUG nova.network.neutron [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Updating instance_info_cache with network_info: [{"id": "b93d9c4c-717c-4679-bcb8-b49b9517e2b8", "address": "fa:16:3e:2c:36:a5", "network": {"id": "5787eb7c-c869-4b84-a7f6-9ba8dc736602", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1039197872-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6570f5ed2a4c40628e884d6ef0e9491b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d9c4c-71", "ovs_interfaceid": "b93d9c4c-717c-4679-bcb8-b49b9517e2b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.297526] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 706.297526] env[62204]: value = "task-1199461" [ 706.297526] env[62204]: _type = "Task" [ 706.297526] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.307428] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199461, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.554295] env[62204]: DEBUG nova.compute.manager [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 706.585193] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 706.585435] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 706.585587] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.585787] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 706.585932] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.586108] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 706.586317] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 706.586472] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 706.586631] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 706.586787] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 706.586951] env[62204]: DEBUG nova.virt.hardware [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 706.587784] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5044e2c7-362d-4670-a45c-4ce468f85d1e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.595856] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79420ffe-db51-4eff-b8c5-ba32a759a4ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.688163] env[62204]: DEBUG nova.scheduler.client.report [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 706.798850] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Releasing lock "refresh_cache-432115aa-8999-40fe-a0cb-31433575c912" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.798976] env[62204]: DEBUG nova.compute.manager [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Instance network_info: |[{"id": "b93d9c4c-717c-4679-bcb8-b49b9517e2b8", "address": "fa:16:3e:2c:36:a5", "network": {"id": "5787eb7c-c869-4b84-a7f6-9ba8dc736602", "bridge": 
"br-int", "label": "tempest-ListServerFiltersTestJSON-1039197872-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6570f5ed2a4c40628e884d6ef0e9491b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d9c4c-71", "ovs_interfaceid": "b93d9c4c-717c-4679-bcb8-b49b9517e2b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 706.799532] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:36:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b93d9c4c-717c-4679-bcb8-b49b9517e2b8', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.807738] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Creating folder: Project (6570f5ed2a4c40628e884d6ef0e9491b). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.811246] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9dd20b5-970c-44cb-a924-7350be51883d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.818858] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199461, 'name': Rename_Task, 'duration_secs': 0.128398} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.819183] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 706.819437] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03a2314b-e307-4348-8ad5-b37af2f064ee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.821967] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Created folder: Project (6570f5ed2a4c40628e884d6ef0e9491b) in parent group-v259933. [ 706.822269] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Creating folder: Instances. Parent ref: group-v259959. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.822854] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-699c1daa-8e91-46d6-b04c-1d3ddc7d144a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.826576] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 706.826576] env[62204]: value = "task-1199463" [ 706.826576] env[62204]: _type = "Task" [ 706.826576] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.832079] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Created folder: Instances in parent group-v259959. [ 706.832337] env[62204]: DEBUG oslo.service.loopingcall [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 706.835318] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.835534] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199463, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.835720] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b620d015-124e-4027-96b4-22ee6711c2ef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.854252] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.854252] env[62204]: value = "task-1199465" [ 706.854252] env[62204]: _type = "Task" [ 706.854252] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.863874] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199465, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.884181] env[62204]: DEBUG nova.compute.manager [req-4561481c-de35-4d03-b96e-6465df159eb0 req-f8130cdf-0093-40be-8851-446d47155403 service nova] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Received event network-changed-b93d9c4c-717c-4679-bcb8-b49b9517e2b8 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 706.884434] env[62204]: DEBUG nova.compute.manager [req-4561481c-de35-4d03-b96e-6465df159eb0 req-f8130cdf-0093-40be-8851-446d47155403 service nova] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Refreshing instance network info cache due to event network-changed-b93d9c4c-717c-4679-bcb8-b49b9517e2b8. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 706.884679] env[62204]: DEBUG oslo_concurrency.lockutils [req-4561481c-de35-4d03-b96e-6465df159eb0 req-f8130cdf-0093-40be-8851-446d47155403 service nova] Acquiring lock "refresh_cache-432115aa-8999-40fe-a0cb-31433575c912" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.884914] env[62204]: DEBUG oslo_concurrency.lockutils [req-4561481c-de35-4d03-b96e-6465df159eb0 req-f8130cdf-0093-40be-8851-446d47155403 service nova] Acquired lock "refresh_cache-432115aa-8999-40fe-a0cb-31433575c912" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.885414] env[62204]: DEBUG nova.network.neutron [req-4561481c-de35-4d03-b96e-6465df159eb0 req-f8130cdf-0093-40be-8851-446d47155403 service nova] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Refreshing network info cache for port b93d9c4c-717c-4679-bcb8-b49b9517e2b8 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 707.194293] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 707.194849] env[62204]: DEBUG nova.compute.manager [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 707.198159] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.838s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 707.199832] env[62204]: INFO nova.compute.claims [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.336591] env[62204]: DEBUG oslo_vmware.api [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199463, 'name': PowerOnVM_Task, 'duration_secs': 0.483614} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.336867] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 707.337083] env[62204]: DEBUG nova.compute.manager [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 707.337852] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef07cc9e-454d-473c-9ae4-70e8e5e7cfef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.371808] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199465, 'name': CreateVM_Task, 'duration_secs': 0.349983} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.372569] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.387661] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.387898] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.388291] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 707.391294] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8f3c577-8314-420a-be11-1669d1c7cd54 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.397919] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 707.397919] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c9edda-afb2-c2f0-11ba-4985e603a332" [ 707.397919] env[62204]: _type = "Task" [ 707.397919] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.409487] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c9edda-afb2-c2f0-11ba-4985e603a332, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.603019] env[62204]: DEBUG nova.network.neutron [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Successfully updated port: ba160e15-24d0-4e35-af63-89849f63afca {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 707.715689] env[62204]: DEBUG nova.compute.utils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 707.715689] env[62204]: DEBUG nova.compute.manager [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 707.715689] env[62204]: DEBUG nova.network.neutron [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 707.732171] env[62204]: DEBUG nova.network.neutron [req-4561481c-de35-4d03-b96e-6465df159eb0 req-f8130cdf-0093-40be-8851-446d47155403 service nova] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Updated VIF entry in instance network info cache for port b93d9c4c-717c-4679-bcb8-b49b9517e2b8. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 707.732632] env[62204]: DEBUG nova.network.neutron [req-4561481c-de35-4d03-b96e-6465df159eb0 req-f8130cdf-0093-40be-8851-446d47155403 service nova] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Updating instance_info_cache with network_info: [{"id": "b93d9c4c-717c-4679-bcb8-b49b9517e2b8", "address": "fa:16:3e:2c:36:a5", "network": {"id": "5787eb7c-c869-4b84-a7f6-9ba8dc736602", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1039197872-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6570f5ed2a4c40628e884d6ef0e9491b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d9c4c-71", "ovs_interfaceid": "b93d9c4c-717c-4679-bcb8-b49b9517e2b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.799121] env[62204]: DEBUG nova.policy [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52fc19cbbaf14319a258f952c739c137', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd93f6aa3eaad4c5b91b657e75854f45f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 707.864738] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.909413] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c9edda-afb2-c2f0-11ba-4985e603a332, 'name': SearchDatastore_Task, 'duration_secs': 0.010135} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.909413] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.909621] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.910228] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.910228] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.910228] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.910399] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b604da9-9e38-4171-be81-9b09a620d98f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.920561] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.920743] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.921813] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0796c97-8043-45f1-9c38-f7a9e9ab3416 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.927554] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 707.927554] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a217db-e4b4-3104-6ed9-fb16adfa41a8" [ 707.927554] env[62204]: _type = "Task" [ 707.927554] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.936682] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a217db-e4b4-3104-6ed9-fb16adfa41a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.110870] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "refresh_cache-48fe8f43-4ab9-41de-9b81-35b4438585ea" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.111911] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired lock "refresh_cache-48fe8f43-4ab9-41de-9b81-35b4438585ea" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.112051] env[62204]: DEBUG nova.network.neutron [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 708.218471] env[62204]: DEBUG nova.compute.manager [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 708.242442] env[62204]: DEBUG oslo_concurrency.lockutils [req-4561481c-de35-4d03-b96e-6465df159eb0 req-f8130cdf-0093-40be-8851-446d47155403 service nova] Releasing lock "refresh_cache-432115aa-8999-40fe-a0cb-31433575c912" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.265172] env[62204]: DEBUG nova.network.neutron [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Successfully created port: ccf86a68-c525-4b8b-940f-b0a08f2d3831 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 708.441468] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a217db-e4b4-3104-6ed9-fb16adfa41a8, 'name': SearchDatastore_Task, 'duration_secs': 0.010273} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.444150] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cc00267-33f2-4d7b-bef9-2840167b000a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.451397] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 708.451397] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528cf2eb-2f0c-f7c3-0f5d-a56a801ec3ab" [ 708.451397] env[62204]: _type = "Task" [ 708.451397] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.461844] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528cf2eb-2f0c-f7c3-0f5d-a56a801ec3ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.659761] env[62204]: DEBUG nova.network.neutron [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 708.693801] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "bcb11a72-4394-42a2-9a9f-295adc1abcd0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.694065] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "bcb11a72-4394-42a2-9a9f-295adc1abcd0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.694266] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "bcb11a72-4394-42a2-9a9f-295adc1abcd0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.694437] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "bcb11a72-4394-42a2-9a9f-295adc1abcd0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.694606] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "bcb11a72-4394-42a2-9a9f-295adc1abcd0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.698438] env[62204]: INFO nova.compute.manager [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Terminating instance [ 708.702831] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "refresh_cache-bcb11a72-4394-42a2-9a9f-295adc1abcd0" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.702902] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquired lock "refresh_cache-bcb11a72-4394-42a2-9a9f-295adc1abcd0" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.703139] env[62204]: DEBUG nova.network.neutron [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member]
[instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 708.790249] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1909a88-4e01-476e-818d-0373794929be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.797385] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15b4e76-3bc5-4cb3-a845-031486d22cf2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.832201] env[62204]: DEBUG nova.network.neutron [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Updating instance_info_cache with network_info: [{"id": "ba160e15-24d0-4e35-af63-89849f63afca", "address": "fa:16:3e:99:43:9d", "network": {"id": "5787eb7c-c869-4b84-a7f6-9ba8dc736602", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1039197872-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6570f5ed2a4c40628e884d6ef0e9491b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba160e15-24", "ovs_interfaceid": "ba160e15-24d0-4e35-af63-89849f63afca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.834055] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b18dc3-89cc-4dc2-ae52-bb7cf2df84b1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.841360] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02a95d6-c11e-4e30-8322-41f26c8172cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.858043] env[62204]: DEBUG nova.compute.provider_tree [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.962294] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528cf2eb-2f0c-f7c3-0f5d-a56a801ec3ab, 'name': 
SearchDatastore_Task, 'duration_secs': 0.010881} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.962552] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 708.962810] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 432115aa-8999-40fe-a0cb-31433575c912/432115aa-8999-40fe-a0cb-31433575c912.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.963073] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ca13582-8663-425d-8fda-2d113c7bd773 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.969048] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 708.969048] env[62204]: value = "task-1199466" [ 708.969048] env[62204]: _type = "Task" [ 708.969048] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.976487] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.223411] env[62204]: DEBUG nova.network.neutron [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 709.229737] env[62204]: DEBUG nova.compute.manager [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 709.265177] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 709.265416] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 709.265573] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.265756] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 709.265937] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.267159] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 709.267159] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 709.267159] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 709.267159] env[62204]: DEBUG 
nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 709.267159] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 709.267368] env[62204]: DEBUG nova.virt.hardware [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 709.267921] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6623254-a34a-489d-85ef-759d531051a9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.276793] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03f869b-3aba-4711-b663-c4ad029845b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.299083] env[62204]: DEBUG nova.network.neutron [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.337925] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Releasing lock "refresh_cache-48fe8f43-4ab9-41de-9b81-35b4438585ea" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.338268] env[62204]: DEBUG nova.compute.manager [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Instance network_info: |[{"id": "ba160e15-24d0-4e35-af63-89849f63afca", "address": "fa:16:3e:99:43:9d", "network": {"id": "5787eb7c-c869-4b84-a7f6-9ba8dc736602", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1039197872-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6570f5ed2a4c40628e884d6ef0e9491b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapba160e15-24", "ovs_interfaceid": "ba160e15-24d0-4e35-af63-89849f63afca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 709.338734] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:43:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba160e15-24d0-4e35-af63-89849f63afca', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 709.346574] env[62204]: DEBUG oslo.service.loopingcall [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 709.347198] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 709.347511] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d2b4310-01b5-4a1c-a234-90eb0b46716f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.362941] env[62204]: DEBUG nova.scheduler.client.report [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 709.368316] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 709.368316] env[62204]: value = "task-1199467" [ 709.368316] env[62204]: _type = "Task" [ 709.368316] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.376460] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199467, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.439951] env[62204]: DEBUG nova.compute.manager [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Received event network-vif-plugged-ba160e15-24d0-4e35-af63-89849f63afca {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 709.439951] env[62204]: DEBUG oslo_concurrency.lockutils [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] Acquiring lock "48fe8f43-4ab9-41de-9b81-35b4438585ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.439951] env[62204]: DEBUG oslo_concurrency.lockutils [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] Lock "48fe8f43-4ab9-41de-9b81-35b4438585ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.440171] env[62204]: DEBUG oslo_concurrency.lockutils [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] Lock "48fe8f43-4ab9-41de-9b81-35b4438585ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.440237] env[62204]: DEBUG nova.compute.manager [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] No waiting events found dispatching network-vif-plugged-ba160e15-24d0-4e35-af63-89849f63afca {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 709.440380] env[62204]: WARNING nova.compute.manager [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Received unexpected event network-vif-plugged-ba160e15-24d0-4e35-af63-89849f63afca for instance with vm_state building and task_state spawning. [ 709.440520] env[62204]: DEBUG nova.compute.manager [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Received event network-changed-ba160e15-24d0-4e35-af63-89849f63afca {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 709.441212] env[62204]: DEBUG nova.compute.manager [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Refreshing instance network info cache due to event network-changed-ba160e15-24d0-4e35-af63-89849f63afca.
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 709.441212] env[62204]: DEBUG oslo_concurrency.lockutils [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] Acquiring lock "refresh_cache-48fe8f43-4ab9-41de-9b81-35b4438585ea" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.441212] env[62204]: DEBUG oslo_concurrency.lockutils [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] Acquired lock "refresh_cache-48fe8f43-4ab9-41de-9b81-35b4438585ea" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.441212] env[62204]: DEBUG nova.network.neutron [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Refreshing network info cache for port ba160e15-24d0-4e35-af63-89849f63afca {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 709.481290] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199466, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485505} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.481565] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 432115aa-8999-40fe-a0cb-31433575c912/432115aa-8999-40fe-a0cb-31433575c912.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.481774] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.482060] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3b903fd-a885-4081-9dd9-ec95ae1da9ba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.489351] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 709.489351] env[62204]: value = "task-1199468" [ 709.489351] env[62204]: _type = "Task" [ 709.489351] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.499073] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199468, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.539407] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquiring lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.539700] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.805644] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Releasing lock "refresh_cache-bcb11a72-4394-42a2-9a9f-295adc1abcd0" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 709.809021] env[62204]: DEBUG nova.compute.manager [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 709.809021] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 709.809021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a45efdb-d896-45af-accb-1500591046d3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.822815] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 709.823896] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36f6f7cb-dbdf-4cc3-8c82-901179bfd0fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.832743] env[62204]: DEBUG oslo_vmware.api [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 709.832743] env[62204]: value = "task-1199469" [ 709.832743] env[62204]: _type = "Task" [ 709.832743] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.846896] env[62204]: DEBUG oslo_vmware.api [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199469, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.870148] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.870148] env[62204]: DEBUG nova.compute.manager [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 709.873942] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.512s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.897194] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199467, 'name': CreateVM_Task, 'duration_secs': 0.359224} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.897194] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 709.898218] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.898657] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.899952] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 709.900352] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e256a2c1-6156-4baf-8cf0-0a46e1a187f3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.906889] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 709.906889] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ce699e-10d5-aacf-c01f-a84c166c62aa" [ 709.906889] env[62204]: _type = "Task" [ 709.906889] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.916393] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ce699e-10d5-aacf-c01f-a84c166c62aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.999382] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199468, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066379} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.999881] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.000413] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53047784-5eb3-4493-8e2e-c07b09bd556e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.022353] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 432115aa-8999-40fe-a0cb-31433575c912/432115aa-8999-40fe-a0cb-31433575c912.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.022638] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a635db7e-4f43-4dd5-a1a7-e56be541d4c9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.045136] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 710.045136] env[62204]: value = "task-1199470" [ 710.045136] env[62204]: _type = "Task" [ 710.045136] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.055234] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199470, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.067189] env[62204]: DEBUG nova.network.neutron [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Successfully updated port: ccf86a68-c525-4b8b-940f-b0a08f2d3831 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 710.193976] env[62204]: DEBUG nova.network.neutron [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Updated VIF entry in instance network info cache for port ba160e15-24d0-4e35-af63-89849f63afca. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 710.194453] env[62204]: DEBUG nova.network.neutron [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Updating instance_info_cache with network_info: [{"id": "ba160e15-24d0-4e35-af63-89849f63afca", "address": "fa:16:3e:99:43:9d", "network": {"id": "5787eb7c-c869-4b84-a7f6-9ba8dc736602", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1039197872-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6570f5ed2a4c40628e884d6ef0e9491b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba160e15-24", "ovs_interfaceid": "ba160e15-24d0-4e35-af63-89849f63afca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.343353] env[62204]: DEBUG oslo_vmware.api [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199469, 'name': PowerOffVM_Task, 'duration_secs': 0.127141} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.343721] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 710.343797] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 710.344044] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbfba4ab-e19f-4e99-b442-61d6ae7ecabd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.364558] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 710.364769] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 710.365321] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Deleting the datastore file [datastore1] bcb11a72-4394-42a2-9a9f-295adc1abcd0 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 710.365321] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b721423b-7911-4d90-9a2a-a78589f8c44c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.371451] env[62204]: DEBUG oslo_vmware.api [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for the task: (returnval){ [ 710.371451] env[62204]: value = "task-1199472" [ 710.371451] env[62204]: _type = "Task" [ 710.371451] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.383879] env[62204]: DEBUG nova.compute.utils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 710.385208] env[62204]: DEBUG oslo_vmware.api [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199472, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.385682] env[62204]: DEBUG nova.compute.manager [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 710.385878] env[62204]: DEBUG nova.network.neutron [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 710.419136] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ce699e-10d5-aacf-c01f-a84c166c62aa, 'name': SearchDatastore_Task, 'duration_secs': 0.011834} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.419734] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.419734] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 710.419969] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.420070] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.420222] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 710.422809] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-a809c358-68bd-4352-a9d5-cbfc4b382b65 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.428637] env[62204]: DEBUG nova.policy [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dfdb2d20697144818576f98054af0fd7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b4bc82065ed4b13bafddd3a3030652c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 710.432036] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 710.432351] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 710.432937] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb52b7b8-2e2e-4d67-aeb0-50d549f7fea5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.440729] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 710.440729] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52aad49f-893b-164d-f385-ee957d6651a9" [ 710.440729] env[62204]: _type = "Task" [ 710.440729] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.449734] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52aad49f-893b-164d-f385-ee957d6651a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.556915] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199470, 'name': ReconfigVM_Task, 'duration_secs': 0.29106} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.557091] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 432115aa-8999-40fe-a0cb-31433575c912/432115aa-8999-40fe-a0cb-31433575c912.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 710.558129] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38631c3f-b647-4829-a23d-8715830315b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.569788] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 710.569788] env[62204]: value = "task-1199473" [ 710.569788] env[62204]: _type = "Task" [ 710.569788] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.571710] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 710.571710] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.571848] env[62204]: DEBUG nova.network.neutron [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 710.578546] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199473, 'name': Rename_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.697092] env[62204]: DEBUG oslo_concurrency.lockutils [req-51284171-18ad-4874-9e28-2dd8ac69e24a req-8d46dc74-1c90-4f4e-8048-c81697d5e2b7 service nova] Releasing lock "refresh_cache-48fe8f43-4ab9-41de-9b81-35b4438585ea" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.705452] env[62204]: DEBUG nova.network.neutron [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Successfully created port: a71dff43-7907-4305-b9cc-260d439fcaab {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.855021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ded218e-1937-45d9-892d-bc2f5ee1bd1a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.861169] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd1250a9-ee22-4f56-8f72-7c4802f0a18a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.893404] env[62204]: DEBUG nova.compute.manager [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 710.899734] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a080e6a9-7beb-4d59-a765-094593ea775e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.908951] env[62204]: DEBUG oslo_vmware.api [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Task: {'id': task-1199472, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157585} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.911056] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 710.911280] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 710.912032] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 710.912032] env[62204]: INFO nova.compute.manager [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Took 1.10 seconds to destroy the instance on the hypervisor. [ 710.912032] env[62204]: DEBUG oslo.service.loopingcall [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 710.912320] env[62204]: DEBUG nova.compute.manager [-] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 710.912320] env[62204]: DEBUG nova.network.neutron [-] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 710.915229] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ce3ed5-5c65-4e66-a2f7-6e201c585793 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.929711] env[62204]: DEBUG nova.compute.provider_tree [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.945684] env[62204]: DEBUG nova.network.neutron [-] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 710.952881] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52aad49f-893b-164d-f385-ee957d6651a9, 'name': SearchDatastore_Task, 'duration_secs': 0.010053} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.954222] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-338f650c-3ea9-46ef-8af1-098b26fafae2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.960313] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 710.960313] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523903eb-8301-c3c8-eebd-0b3e86bb94ac" [ 710.960313] env[62204]: _type = "Task" [ 710.960313] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.968325] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523903eb-8301-c3c8-eebd-0b3e86bb94ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.078864] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199473, 'name': Rename_Task, 'duration_secs': 0.143542} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.080425] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 711.080487] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15787af9-3473-4821-83f7-ad9537ff95af {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.087433] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 711.087433] env[62204]: value = "task-1199474" [ 711.087433] env[62204]: _type = "Task" [ 711.087433] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.094962] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199474, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.132506] env[62204]: DEBUG nova.network.neutron [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 711.436286] env[62204]: DEBUG nova.scheduler.client.report [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 711.449513] env[62204]: DEBUG nova.network.neutron [-] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.473589] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523903eb-8301-c3c8-eebd-0b3e86bb94ac, 'name': SearchDatastore_Task, 'duration_secs': 0.010606} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.473858] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 711.474173] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 48fe8f43-4ab9-41de-9b81-35b4438585ea/48fe8f43-4ab9-41de-9b81-35b4438585ea.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 711.474433] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b414169-c008-429e-b8bd-7db7088cbd8f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.481240] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 711.481240] env[62204]: value = "task-1199475" [ 711.481240] env[62204]: _type = "Task" [ 711.481240] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.488921] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199475, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.596520] env[62204]: DEBUG oslo_vmware.api [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199474, 'name': PowerOnVM_Task, 'duration_secs': 0.469483} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.596788] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 711.596980] env[62204]: INFO nova.compute.manager [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Took 7.68 seconds to spawn the instance on the hypervisor. [ 711.597175] env[62204]: DEBUG nova.compute.manager [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 711.597905] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39bcbab-2555-428b-8bce-6e61904f5f94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.629379] env[62204]: DEBUG nova.network.neutron [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [{"id": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "address": "fa:16:3e:b6:54:e6", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf86a68-c5", "ovs_interfaceid": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.653163] env[62204]: DEBUG nova.compute.manager [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Received event network-vif-plugged-ccf86a68-c525-4b8b-940f-b0a08f2d3831 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 711.653433] env[62204]: DEBUG oslo_concurrency.lockutils [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] Acquiring lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.653644] env[62204]: DEBUG oslo_concurrency.lockutils [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.653807] env[62204]: DEBUG oslo_concurrency.lockutils [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.653967] env[62204]: DEBUG nova.compute.manager [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] No waiting events found dispatching network-vif-plugged-ccf86a68-c525-4b8b-940f-b0a08f2d3831 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 711.654431] env[62204]: WARNING nova.compute.manager [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Received unexpected event network-vif-plugged-ccf86a68-c525-4b8b-940f-b0a08f2d3831 for instance with vm_state building and task_state spawning. [ 711.654610] env[62204]: DEBUG nova.compute.manager [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Received event network-changed-ccf86a68-c525-4b8b-940f-b0a08f2d3831 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 711.654767] env[62204]: DEBUG nova.compute.manager [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Refreshing instance network info cache due to event network-changed-ccf86a68-c525-4b8b-940f-b0a08f2d3831. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 711.654987] env[62204]: DEBUG oslo_concurrency.lockutils [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] Acquiring lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.908290] env[62204]: DEBUG nova.compute.manager [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 711.938095] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 711.938377] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 711.938534] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.938717] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 711.938945] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.939099] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 711.939261] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 711.939414] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 711.939583] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 711.939743] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 711.939913] env[62204]: DEBUG nova.virt.hardware [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 711.940801] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2f33de-9fdd-4643-8c84-3cfe66a1e009 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.944100] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.070s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.944714] env[62204]: ERROR nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 691de40c-8d46-4034-93f8-719356568cc2, please check neutron logs for more information. 
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Traceback (most recent call last):
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self.driver.spawn(context, instance, image_meta,
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] vm_ref = self.build_virtual_machine(instance,
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] vif_infos = vmwarevif.get_vif_info(self._session,
[ 711.944714] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] for vif in network_info:
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] return self._sync_wrapper(fn, *args, **kwargs)
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self.wait()
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/model.py", line 635, in wait
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self[:] = self._gt.wait()
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] return self._exit_event.wait()
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] result = hub.switch()
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 711.945169] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] return self.greenlet.switch()
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] result = function(*args, **kwargs)
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] return func(*args, **kwargs)
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] raise e
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] nwinfo = self.network_api.allocate_for_instance(
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] created_port_ids = self._update_ports_for_instance(
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] with excutils.save_and_reraise_exception():
[ 711.945618] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] self.force_reraise()
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] raise self.value
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] updated_port = self._update_port(
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] _ensure_no_port_binding_failure(port)
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] raise exception.PortBindingFailed(port_id=port['id'])
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] nova.exception.PortBindingFailed: Binding failed for port 691de40c-8d46-4034-93f8-719356568cc2, please check neutron logs for more information.
[ 711.946097] env[62204]: ERROR nova.compute.manager [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39]
[ 711.946577] env[62204]: DEBUG nova.compute.utils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Binding failed for port 691de40c-8d46-4034-93f8-719356568cc2, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 711.947768] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.608s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 711.951664] env[62204]: INFO nova.compute.claims [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 711.952547] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Build of instance 0ab619ea-755b-4d71-9c12-0eeda0b42a39 was re-scheduled: Binding failed for port 691de40c-8d46-4034-93f8-719356568cc2, please check neutron logs for more information.
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 711.953050] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 711.953304] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Acquiring lock "refresh_cache-0ab619ea-755b-4d71-9c12-0eeda0b42a39" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.953460] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Acquired lock "refresh_cache-0ab619ea-755b-4d71-9c12-0eeda0b42a39" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.953635] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 711.957979] env[62204]: INFO nova.compute.manager [-] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Took 1.04 seconds to deallocate network for instance. [ 711.963715] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1831a4-87b9-495c-8d0a-368056ee1de4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.000455] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199475, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502253} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.000455] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 48fe8f43-4ab9-41de-9b81-35b4438585ea/48fe8f43-4ab9-41de-9b81-35b4438585ea.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 712.000455] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.000455] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0cbdb4d6-9ccc-49c5-a1d4-151322a3d023 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.008443] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 712.008443] env[62204]: value = "task-1199476" [ 712.008443] env[62204]: _type = "Task" [ 712.008443] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.016907] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199476, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.116706] env[62204]: INFO nova.compute.manager [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Took 28.36 seconds to build instance. 
[ 712.131612] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.131933] env[62204]: DEBUG nova.compute.manager [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Instance network_info: |[{"id": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "address": "fa:16:3e:b6:54:e6", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf86a68-c5", "ovs_interfaceid": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 712.132265] env[62204]: DEBUG oslo_concurrency.lockutils [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] Acquired lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.132507] env[62204]: DEBUG nova.network.neutron [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Refreshing network info cache for port ccf86a68-c525-4b8b-940f-b0a08f2d3831 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 712.133649] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:54:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ccf86a68-c525-4b8b-940f-b0a08f2d3831', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 712.141406] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating folder: Project (d93f6aa3eaad4c5b91b657e75854f45f). 
Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.144502] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96ab6199-1dd9-4b58-94fe-b3f4717385b9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.155878] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Created folder: Project (d93f6aa3eaad4c5b91b657e75854f45f) in parent group-v259933. [ 712.156132] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating folder: Instances. Parent ref: group-v259963. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.156344] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71bc7774-600a-45e4-b40e-a5c3acde553c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.165426] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Created folder: Instances in parent group-v259963. [ 712.165661] env[62204]: DEBUG oslo.service.loopingcall [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 712.165842] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 712.166051] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c407319-16af-439d-946e-ef27cdf49626 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.187759] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 712.187759] env[62204]: value = "task-1199479" [ 712.187759] env[62204]: _type = "Task" [ 712.187759] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.199926] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199479, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.259691] env[62204]: DEBUG nova.compute.manager [req-674e54bf-a992-4455-ba29-dc862c00b1a2 req-218af6fe-c287-4df0-9aa7-bc6b0f5dfa0b service nova] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Received event network-vif-plugged-a71dff43-7907-4305-b9cc-260d439fcaab {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 712.259815] env[62204]: DEBUG oslo_concurrency.lockutils [req-674e54bf-a992-4455-ba29-dc862c00b1a2 req-218af6fe-c287-4df0-9aa7-bc6b0f5dfa0b service nova] Acquiring lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.260018] env[62204]: DEBUG oslo_concurrency.lockutils [req-674e54bf-a992-4455-ba29-dc862c00b1a2 req-218af6fe-c287-4df0-9aa7-bc6b0f5dfa0b service nova] Lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.260196] env[62204]: DEBUG oslo_concurrency.lockutils [req-674e54bf-a992-4455-ba29-dc862c00b1a2 req-218af6fe-c287-4df0-9aa7-bc6b0f5dfa0b service nova] Lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.260367] env[62204]: DEBUG nova.compute.manager [req-674e54bf-a992-4455-ba29-dc862c00b1a2 req-218af6fe-c287-4df0-9aa7-bc6b0f5dfa0b service nova] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] No waiting events found dispatching network-vif-plugged-a71dff43-7907-4305-b9cc-260d439fcaab {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 712.260512] env[62204]: WARNING nova.compute.manager [req-674e54bf-a992-4455-ba29-dc862c00b1a2 req-218af6fe-c287-4df0-9aa7-bc6b0f5dfa0b service nova] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Received unexpected event network-vif-plugged-a71dff43-7907-4305-b9cc-260d439fcaab for instance with vm_state building and task_state spawning. [ 712.305318] env[62204]: DEBUG nova.network.neutron [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Successfully updated port: a71dff43-7907-4305-b9cc-260d439fcaab {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 712.392752] env[62204]: DEBUG nova.network.neutron [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updated VIF entry in instance network info cache for port ccf86a68-c525-4b8b-940f-b0a08f2d3831. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 712.393111] env[62204]: DEBUG nova.network.neutron [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [{"id": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "address": "fa:16:3e:b6:54:e6", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf86a68-c5", "ovs_interfaceid": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.474437] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.477074] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.517815] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199476, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066018} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.518098] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 712.518872] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91edc48-a5c0-48b0-8d6b-553a66e60c65 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.542515] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 48fe8f43-4ab9-41de-9b81-35b4438585ea/48fe8f43-4ab9-41de-9b81-35b4438585ea.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 712.545365] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9540e5ec-f07f-40c5-87df-1b7f639e150e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.565801] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 712.565801] env[62204]: value = "task-1199480" [ 712.565801] env[62204]: _type = "Task" [ 712.565801] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.577113] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199480, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.620843] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8cd732a6-cd3f-40ae-835c-d51856b025bb tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "432115aa-8999-40fe-a0cb-31433575c912" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.944s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.650972] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.699413] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199479, 'name': CreateVM_Task, 'duration_secs': 0.417446} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.699567] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 712.700356] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.700524] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.700836] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 712.701136] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09af4cec-70c7-4039-9d17-be87029db786 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.705709] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 712.705709] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5296e145-5aa7-98e1-05df-0269c69f27a2" [ 712.705709] env[62204]: _type = "Task" [ 712.705709] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.714268] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5296e145-5aa7-98e1-05df-0269c69f27a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.807557] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquiring lock "refresh_cache-19326d9f-5f3a-4756-874f-d4d3ce25f8e8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.808214] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquired lock "refresh_cache-19326d9f-5f3a-4756-874f-d4d3ce25f8e8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.808214] env[62204]: DEBUG nova.network.neutron [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 712.895947] env[62204]: DEBUG oslo_concurrency.lockutils [req-67be5d14-ef60-47f8-a37c-ea1047a66541 req-3f2a55f4-4500-4d04-ac85-4448af45245f service nova] Releasing lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.078151] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199480, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.123950] env[62204]: DEBUG nova.compute.manager [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 713.157028] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Releasing lock "refresh_cache-0ab619ea-755b-4d71-9c12-0eeda0b42a39" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.157411] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 713.157640] env[62204]: DEBUG nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 713.157833] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 713.185518] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.217752] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5296e145-5aa7-98e1-05df-0269c69f27a2, 'name': SearchDatastore_Task, 'duration_secs': 0.009481} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.220465] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.220712] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 713.221459] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.221459] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.221459] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 713.221728] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42ec75e5-8149-4d91-b6f3-04eef108ac79 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.230038] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 713.230146] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 713.230955] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fba6fdd2-b121-4c8f-ae59-d08f47f5f432 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.239446] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 713.239446] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b87f10-0d13-04de-4d5a-eb466afd04d0" [ 713.239446] env[62204]: _type = "Task" [ 713.239446] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.250449] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b87f10-0d13-04de-4d5a-eb466afd04d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.359272] env[62204]: DEBUG nova.network.neutron [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.378368] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e1aa11-7c9a-490a-a9b7-3fabd979fabc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.385774] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76614d18-660d-47fd-b186-2fe8203934c7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.418800] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff84277c-8b24-472d-bb68-20d5039a40c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.426425] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03954bca-a108-42d2-a954-37b90475cd87 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.439823] env[62204]: DEBUG nova.compute.provider_tree [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.528784] env[62204]: DEBUG nova.network.neutron [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Updating instance_info_cache with network_info: [{"id": "a71dff43-7907-4305-b9cc-260d439fcaab", "address": "fa:16:3e:53:e2:ee", "network": {"id": "af18a522-3072-4d9e-85ae-fbc67e5352cc", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-51529231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b4bc82065ed4b13bafddd3a3030652c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa71dff43-79", "ovs_interfaceid": "a71dff43-7907-4305-b9cc-260d439fcaab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.577292] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199480, 'name': ReconfigVM_Task, 'duration_secs': 0.797839} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.577567] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 48fe8f43-4ab9-41de-9b81-35b4438585ea/48fe8f43-4ab9-41de-9b81-35b4438585ea.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 713.578618] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31d8404f-8d49-4002-8814-a3afd22b5603 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.584803] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 713.584803] env[62204]: value = "task-1199481" [ 713.584803] env[62204]: _type = "Task" [ 713.584803] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.592186] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199481, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.644047] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.688206] env[62204]: DEBUG nova.network.neutron [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.749461] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b87f10-0d13-04de-4d5a-eb466afd04d0, 'name': SearchDatastore_Task, 'duration_secs': 0.009044} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.750192] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b2465a0-7ed9-45ae-8620-9e1bc7144206 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.755071] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 713.755071] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529439c5-ae65-2a1a-fcde-10b6a18f4e00" [ 713.755071] env[62204]: _type = "Task" [ 713.755071] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.762996] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529439c5-ae65-2a1a-fcde-10b6a18f4e00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.942716] env[62204]: DEBUG nova.scheduler.client.report [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 714.034019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Releasing lock "refresh_cache-19326d9f-5f3a-4756-874f-d4d3ce25f8e8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.034019] env[62204]: DEBUG nova.compute.manager [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Instance network_info: |[{"id": "a71dff43-7907-4305-b9cc-260d439fcaab", "address": "fa:16:3e:53:e2:ee", "network": {"id": "af18a522-3072-4d9e-85ae-fbc67e5352cc", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-51529231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b4bc82065ed4b13bafddd3a3030652c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa71dff43-79", "ovs_interfaceid": "a71dff43-7907-4305-b9cc-260d439fcaab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 714.034213] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:e2:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a71dff43-7907-4305-b9cc-260d439fcaab', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 714.040826] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Creating folder: Project (5b4bc82065ed4b13bafddd3a3030652c). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 714.041270] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7795ef24-f964-4727-abb0-00385ebe661d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.053081] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Created folder: Project (5b4bc82065ed4b13bafddd3a3030652c) in parent group-v259933. [ 714.053081] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Creating folder: Instances. Parent ref: group-v259966. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 714.053215] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4d7b7e2-e31c-44e6-90bd-020a9bb3cb5f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.061238] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Created folder: Instances in parent group-v259966. [ 714.061488] env[62204]: DEBUG oslo.service.loopingcall [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 714.061675] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 714.061887] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd2b3389-74a6-4263-8d46-2f0032612a26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.080678] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 714.080678] env[62204]: value = "task-1199484" [ 714.080678] env[62204]: _type = "Task" [ 714.080678] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.088622] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199484, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.096103] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199481, 'name': Rename_Task, 'duration_secs': 0.151527} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.096390] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 714.096616] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ca4d5a1-1beb-4b31-95b8-baeb0f9fac6e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.102598] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 714.102598] env[62204]: value = "task-1199485" [ 714.102598] env[62204]: _type = "Task" [ 714.102598] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.115541] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199485, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.191435] env[62204]: INFO nova.compute.manager [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] [instance: 0ab619ea-755b-4d71-9c12-0eeda0b42a39] Took 1.03 seconds to deallocate network for instance. 
[ 714.266243] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529439c5-ae65-2a1a-fcde-10b6a18f4e00, 'name': SearchDatastore_Task, 'duration_secs': 0.009823} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.266532] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.266812] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 0a4a432d-a71a-4da7-be90-25dcec5a64c6/0a4a432d-a71a-4da7-be90-25dcec5a64c6.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 714.267125] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-909d92de-a4a3-4e2a-87ce-9e8b0f100a9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.274357] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 714.274357] env[62204]: value = "task-1199486" [ 714.274357] env[62204]: _type = "Task" [ 714.274357] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.283389] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.346061] env[62204]: DEBUG nova.compute.manager [req-decb3476-1cc8-499e-b03f-5c7fb884f20c req-17ff9ec4-4dc3-4674-85e3-912d6e2af7a0 service nova] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Received event network-changed-a71dff43-7907-4305-b9cc-260d439fcaab {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 714.346061] env[62204]: DEBUG nova.compute.manager [req-decb3476-1cc8-499e-b03f-5c7fb884f20c req-17ff9ec4-4dc3-4674-85e3-912d6e2af7a0 service nova] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Refreshing instance network info cache due to event network-changed-a71dff43-7907-4305-b9cc-260d439fcaab. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 714.346248] env[62204]: DEBUG oslo_concurrency.lockutils [req-decb3476-1cc8-499e-b03f-5c7fb884f20c req-17ff9ec4-4dc3-4674-85e3-912d6e2af7a0 service nova] Acquiring lock "refresh_cache-19326d9f-5f3a-4756-874f-d4d3ce25f8e8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.346289] env[62204]: DEBUG oslo_concurrency.lockutils [req-decb3476-1cc8-499e-b03f-5c7fb884f20c req-17ff9ec4-4dc3-4674-85e3-912d6e2af7a0 service nova] Acquired lock "refresh_cache-19326d9f-5f3a-4756-874f-d4d3ce25f8e8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.346498] env[62204]: DEBUG nova.network.neutron [req-decb3476-1cc8-499e-b03f-5c7fb884f20c req-17ff9ec4-4dc3-4674-85e3-912d6e2af7a0 service nova] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Refreshing network info cache for port a71dff43-7907-4305-b9cc-260d439fcaab {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 714.448032] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.448682] env[62204]: DEBUG nova.compute.manager [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 714.453248] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 15.371s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.453248] env[62204]: DEBUG nova.objects.instance [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62204) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 714.594149] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199484, 'name': CreateVM_Task, 'duration_secs': 0.311944} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.594528] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 714.595572] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.596020] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.596176] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 714.596442] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63d89970-4e13-4682-8db8-a7821698901d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.604254] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 714.604254] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52de6723-674d-e5a1-0ed2-9b5facb5dec3" [ 714.604254] env[62204]: _type = "Task" [ 714.604254] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.618672] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199485, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.622565] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52de6723-674d-e5a1-0ed2-9b5facb5dec3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.783347] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199486, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491282} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.783615] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 0a4a432d-a71a-4da7-be90-25dcec5a64c6/0a4a432d-a71a-4da7-be90-25dcec5a64c6.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 714.783826] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 714.784112] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-052d0918-6533-44df-afbf-0a623b8dcecd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.790138] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 714.790138] env[62204]: value = "task-1199487" [ 714.790138] env[62204]: _type = "Task" [ 714.790138] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.798335] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199487, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.960202] env[62204]: DEBUG nova.compute.utils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 714.964540] env[62204]: DEBUG nova.compute.manager [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Allocating IP information in the background. 
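The task-1199486/task-1199487 pair shows the root-disk preparation path during spawn: the cached image VMDK is copied into the instance directory, then extended to the flavor's root size. The logged 1048576 is the target in KB for a 1 GiB root disk (1 * 1024 * 1024). A rough sketch of that sequence, assuming a session object with the usual oslo.vmware invoke_api/wait_for_task helpers; this is not nova's vm_util code, and the datastore paths just follow the "[datastore] dir/file.vmdk" convention in the log:

```python
# Sketch only: copy the cached image disk, then grow it to root_gb.
def prepare_root_disk(session, disk_manager, datacenter,
                      cached_vmdk, instance_vmdk, root_gb):
    # 1. CopyVirtualDisk_Task: cache copy -> instance directory.
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_manager,
        sourceName=cached_vmdk, sourceDatacenter=datacenter,
        destName=instance_vmdk, destDatacenter=datacenter)
    session.wait_for_task(copy_task)

    # 2. ExtendVirtualDisk_Task: size is in KB, so root_gb=1 gives
    #    1 * 1024 * 1024 = 1048576, matching the log line above.
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_manager,
        name=instance_vmdk, datacenter=datacenter,
        newCapacityKb=root_gb * 1024 * 1024, eagerZero=False)
    session.wait_for_task(extend_task)
```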
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 714.964706] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 715.016149] env[62204]: DEBUG nova.policy [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a29d584772d84abe8d36db8dffcc0729', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc9c47a4209c4f158e39dd04afd17fa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 715.117182] env[62204]: DEBUG oslo_vmware.api [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199485, 'name': PowerOnVM_Task, 'duration_secs': 0.574237} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.117834] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 715.118056] env[62204]: INFO nova.compute.manager [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Took 8.56 seconds to spawn the instance on the hypervisor. [ 715.118247] env[62204]: DEBUG nova.compute.manager [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 715.118972] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5d0e15-c77a-410e-8938-998fa19eaf22 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.125985] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52de6723-674d-e5a1-0ed2-9b5facb5dec3, 'name': SearchDatastore_Task, 'duration_secs': 0.018133} completed successfully. 
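The "Policy check for network:attach_external_network failed" line is a soft check: with only the reader/member roles in the logged credentials, nova simply will not attach external networks for this request; it is not an error. A minimal oslo.policy illustration of that kind of check; the default rule string used here is an assumption for the sketch, not necessarily what this deployment configures:

```python
# Soft policy check sketch (result False, nothing raised).
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Assumed default for illustration only.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

creds = {'is_admin': False, 'roles': ['reader', 'member']}
allowed = enforcer.enforce('network:attach_external_network',
                           {}, creds, do_raise=False)
print(allowed)  # False for a plain member/reader token, as in the log
```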
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.126642] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.126895] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.127137] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.127332] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.127516] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.127816] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebbcbd48-0ee1-4631-b90a-3582cfe8cee3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.142442] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.142625] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Folder [datastore2] devstack-image-cache_base created. 
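The lock on "[datastore2] devstack-image-cache_base/<image-id>", the SearchDatastore_Task and the MakeDirectory call together implement a per-image cache check: only one build at a time may populate the cache entry for a given image, and the fetch is skipped when the cached VMDK already exists. A sketch of that pattern; cache_exists() and fetch_image() are placeholders, not real nova helpers:

```python
# Double-checked image cache under a per-image lock.
from oslo_concurrency import lockutils


def ensure_cached_image(datastore, image_id, cache_exists, fetch_image):
    cache_path = (f'[{datastore}] devstack-image-cache_base/'
                  f'{image_id}/{image_id}.vmdk')
    # One lock per cached image: builds of different images proceed in
    # parallel, two builds of the same image serialize here.
    with lockutils.lock(cache_path):
        if not cache_exists(cache_path):       # SearchDatastore_Task
            fetch_image(image_id, cache_path)  # download/upload once
    return cache_path
```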
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 715.143400] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac27f4e6-3a72-48a5-8148-46ee31003c40 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.150181] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 715.150181] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520c5061-e887-0fa0-16e8-7e2c2e186c9b" [ 715.150181] env[62204]: _type = "Task" [ 715.150181] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.161750] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520c5061-e887-0fa0-16e8-7e2c2e186c9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.222874] env[62204]: DEBUG nova.network.neutron [req-decb3476-1cc8-499e-b03f-5c7fb884f20c req-17ff9ec4-4dc3-4674-85e3-912d6e2af7a0 service nova] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Updated VIF entry in instance network info cache for port a71dff43-7907-4305-b9cc-260d439fcaab. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 715.224079] env[62204]: DEBUG nova.network.neutron [req-decb3476-1cc8-499e-b03f-5c7fb884f20c req-17ff9ec4-4dc3-4674-85e3-912d6e2af7a0 service nova] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Updating instance_info_cache with network_info: [{"id": "a71dff43-7907-4305-b9cc-260d439fcaab", "address": "fa:16:3e:53:e2:ee", "network": {"id": "af18a522-3072-4d9e-85ae-fbc67e5352cc", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-51529231-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b4bc82065ed4b13bafddd3a3030652c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa71dff43-79", "ovs_interfaceid": "a71dff43-7907-4305-b9cc-260d439fcaab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.229934] env[62204]: INFO nova.scheduler.client.report [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Deleted allocations for instance 0ab619ea-755b-4d71-9c12-0eeda0b42a39 [ 
715.288014] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Successfully created port: 929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.303110] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199487, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066161} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.306581] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 715.306581] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ab80b9-c8f6-42e6-878f-21d5b24e30ea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.338384] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 0a4a432d-a71a-4da7-be90-25dcec5a64c6/0a4a432d-a71a-4da7-be90-25dcec5a64c6.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 715.339743] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-319946fc-ea66-4988-8e80-4b2e8e19bf44 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.359290] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 715.359290] env[62204]: value = "task-1199488" [ 715.359290] env[62204]: _type = "Task" [ 715.359290] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.367885] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199488, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.465746] env[62204]: DEBUG nova.compute.manager [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Start building block device mappings for instance. 
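The "Reconfiguring VM instance ... to attach disk ... with type sparse" step is a ReconfigVM_Task carrying a device-change spec that adds the freshly copied VMDK as the root disk. In outline, and expressed with pyVmomi for brevity rather than the suds-based client nova itself uses (vm, controller key and unit number are assumed to be known already):

```python
# Trimmed sketch of attaching an existing VMDK via ReconfigVM_Task.
from pyVmomi import vim


def attach_vmdk(vm, vmdk_path, controller_key, unit_number):
    disk = vim.vm.device.VirtualDisk()
    disk.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
    disk.backing.fileName = vmdk_path      # e.g. '[datastore1] uuid/uuid.vmdk'
    disk.backing.diskMode = 'persistent'
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number

    change = vim.vm.device.VirtualDeviceSpec()
    change.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
    change.device = disk

    spec = vim.vm.ConfigSpec(deviceChange=[change])
    return vm.ReconfigVM_Task(spec=spec)   # then poll with wait_for_task
```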
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 715.469022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8e263fc1-3995-458a-8528-9ca0a4623cc5 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.470061] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.365s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.558624] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Successfully created port: 962bb0fb-5bee-480f-ae79-b9ba36189ba1 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.649634] env[62204]: INFO nova.compute.manager [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Took 29.95 seconds to build instance. [ 715.662566] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520c5061-e887-0fa0-16e8-7e2c2e186c9b, 'name': SearchDatastore_Task, 'duration_secs': 0.009651} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.663658] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe1ed13c-6acd-45e2-bf6b-fa9d85cba9e6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.670349] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 715.670349] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5271798c-367d-4e19-35ce-f91c396e523e" [ 715.670349] env[62204]: _type = "Task" [ 715.670349] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.684248] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5271798c-367d-4e19-35ce-f91c396e523e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.726974] env[62204]: DEBUG oslo_concurrency.lockutils [req-decb3476-1cc8-499e-b03f-5c7fb884f20c req-17ff9ec4-4dc3-4674-85e3-912d6e2af7a0 service nova] Releasing lock "refresh_cache-19326d9f-5f3a-4756-874f-d4d3ce25f8e8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.740797] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5947b60b-fb7c-44e8-a511-5494bc6b99be tempest-ServerTagsTestJSON-652264764 tempest-ServerTagsTestJSON-652264764-project-member] Lock "0ab619ea-755b-4d71-9c12-0eeda0b42a39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 157.722s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.820826] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Successfully created port: 4688967a-f972-4674-959a-9c23ad7c85d8 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.874238] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199488, 'name': ReconfigVM_Task, 'duration_secs': 0.2983} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.874571] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 0a4a432d-a71a-4da7-be90-25dcec5a64c6/0a4a432d-a71a-4da7-be90-25dcec5a64c6.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 715.875250] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79148e19-1595-47bb-afa0-fb34a21b5550 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.882758] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 715.882758] env[62204]: value = "task-1199489" [ 715.882758] env[62204]: _type = "Task" [ 715.882758] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.892234] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199489, 'name': Rename_Task} progress is 0%. 
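The three "Successfully created port" lines (929b6a1e-..., 962bb0fb-..., 4688967a-...) all belong to one multi-NIC boot of instance b0180c2b-...: allocate_for_instance() creates one neutron port per requested network before the VIFs are plugged. The same loop, shown here with openstacksdk rather than nova's internal neutron client wrapper; the cloud name and network IDs are placeholders:

```python
# One port per requested network, tagged with the instance UUID.
import openstack


def create_ports_for_instance(instance_uuid, network_ids):
    conn = openstack.connect(cloud='devstack')   # assumed clouds.yaml entry
    ports = []
    for net_id in network_ids:
        port = conn.network.create_port(
            network_id=net_id,
            device_id=instance_uuid,
            device_owner='compute:nova')
        ports.append(port)
    return ports
```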
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.151324] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5a3d27e-d2ce-4474-8a32-8955d88d026c tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "48fe8f43-4ab9-41de-9b81-35b4438585ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.958s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.181895] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5271798c-367d-4e19-35ce-f91c396e523e, 'name': SearchDatastore_Task, 'duration_secs': 0.020927} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.182438] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.182438] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 19326d9f-5f3a-4756-874f-d4d3ce25f8e8/19326d9f-5f3a-4756-874f-d4d3ce25f8e8.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 716.183048] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98709af1-89b5-4249-b366-6c2c180cb1c7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.193736] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 716.193736] env[62204]: value = "task-1199490" [ 716.193736] env[62204]: _type = "Task" [ 716.193736] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.206422] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199490, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.243556] env[62204]: DEBUG nova.compute.manager [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 716.403714] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199489, 'name': Rename_Task, 'duration_secs': 0.157766} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.404428] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 716.404696] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1a42155-4d46-4ab3-bd2f-baa246328678 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.417253] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 716.417253] env[62204]: value = "task-1199491" [ 716.417253] env[62204]: _type = "Task" [ 716.417253] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.427300] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199491, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.469419] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4671098-dd42-4331-bd7f-e74f2c8759e3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.477467] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed46221-feb0-49cc-9e0e-eff493d533be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.481844] env[62204]: DEBUG nova.compute.manager [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Start spawning the instance on the hypervisor. 
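The Rename_Task followed by PowerOnVM_Task is the tail of the spawn sequence for instance 0a4a432d-...: once the disk is attached, the VM is renamed and powered on, each step being one vCenter task polled to completion. Roughly, using the generic oslo.vmware session helpers (the method names are the vSphere API ones; vm_ref is an already-resolved VirtualMachine reference):

```python
# Rename then power on, waiting for each vCenter task in turn.
def rename_and_power_on(session, vm_ref, new_name):
    rename_task = session.invoke_api(session.vim, 'Rename_Task',
                                     vm_ref, newName=new_name)
    session.wait_for_task(rename_task)

    poweron_task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(poweron_task)
```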
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 716.519516] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d2207c-d2e4-443d-b5f3-f832fb73dfd5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.527878] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 716.528139] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 716.528296] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 716.528476] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 716.528621] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 716.528765] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 716.528971] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 716.529146] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 
tempest-ServersTestMultiNic-1202567639-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 716.529313] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 716.529475] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 716.529646] env[62204]: DEBUG nova.virt.hardware [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 716.530545] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c073a6-4bcb-4dba-890e-59e902cfb254 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.537294] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e0df80-3c56-40b1-ab3f-3654b65b44bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.545243] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd454f90-c869-41b2-8879-1d21d0e79533 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.557532] env[62204]: DEBUG nova.compute.provider_tree [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.654882] env[62204]: DEBUG nova.compute.manager [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 716.710873] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199490, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465349} completed successfully. 
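The "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies" lines are the guest CPU topology search: with 1 vCPU and effectively unlimited per-dimension maximums (65536), the only factorization sockets * cores * threads == vcpus is 1:1:1. A toy version of that enumeration (nova's real logic in nova/virt/hardware.py additionally honours flavor and image preferences):

```python
# Enumerate sockets/cores/threads factorizations of the vCPU count.
from collections import namedtuple

Topology = namedtuple('Topology', 'sockets cores threads')


def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topos.append(Topology(s, c, t))
    return topos


print(possible_topologies(1, 65536, 65536, 65536))
# -> [Topology(sockets=1, cores=1, threads=1)]
```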
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.710873] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 19326d9f-5f3a-4756-874f-d4d3ce25f8e8/19326d9f-5f3a-4756-874f-d4d3ce25f8e8.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 716.710873] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 716.710873] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44ecbd20-efa5-4229-9d9f-aab40c819018 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.718670] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 716.718670] env[62204]: value = "task-1199492" [ 716.718670] env[62204]: _type = "Task" [ 716.718670] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.729193] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199492, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.763718] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.932721] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199491, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.064107] env[62204]: DEBUG nova.scheduler.client.report [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.184421] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.232671] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199492, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056517} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.232953] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 717.233989] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6153ea-534f-4677-b3b6-8b246ae71c94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.260290] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 19326d9f-5f3a-4756-874f-d4d3ce25f8e8/19326d9f-5f3a-4756-874f-d4d3ce25f8e8.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 717.260290] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bb982c8-950d-42c0-8902-dc558a2c3848 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.282834] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 717.282834] env[62204]: value = "task-1199493" [ 717.282834] env[62204]: _type = "Task" [ 717.282834] env[62204]: } to complete. 
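The inventory dict reported for provider 92e8f362-... translates into schedulable capacity as usable = (total - reserved) * allocation_ratio. Worked out with the numbers from the log line above:

```python
# Capacity implied by the reported inventory.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f'{rc}: {usable:g}')
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```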
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.290372] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199493, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.427547] env[62204]: DEBUG nova.compute.manager [req-a48434d1-ec19-48b9-b16e-31d14343ddab req-1b09d18f-82fe-41cd-a3a6-401f9e49679a service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received event network-vif-plugged-929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 717.427767] env[62204]: DEBUG oslo_concurrency.lockutils [req-a48434d1-ec19-48b9-b16e-31d14343ddab req-1b09d18f-82fe-41cd-a3a6-401f9e49679a service nova] Acquiring lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.427976] env[62204]: DEBUG oslo_concurrency.lockutils [req-a48434d1-ec19-48b9-b16e-31d14343ddab req-1b09d18f-82fe-41cd-a3a6-401f9e49679a service nova] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.428155] env[62204]: DEBUG oslo_concurrency.lockutils [req-a48434d1-ec19-48b9-b16e-31d14343ddab req-1b09d18f-82fe-41cd-a3a6-401f9e49679a service nova] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.428416] env[62204]: DEBUG nova.compute.manager [req-a48434d1-ec19-48b9-b16e-31d14343ddab req-1b09d18f-82fe-41cd-a3a6-401f9e49679a service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] No waiting events found dispatching network-vif-plugged-929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 717.428525] env[62204]: WARNING nova.compute.manager [req-a48434d1-ec19-48b9-b16e-31d14343ddab req-1b09d18f-82fe-41cd-a3a6-401f9e49679a service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received unexpected event network-vif-plugged-929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd for instance with vm_state building and task_state spawning. [ 717.435071] env[62204]: DEBUG oslo_vmware.api [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199491, 'name': PowerOnVM_Task, 'duration_secs': 0.546877} completed successfully. 
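The "network-vif-plugged" block is the external-event handshake: during spawn the compute registers a waiter for the event, and neutron's notification (relayed through nova-api) pops it. The WARNING simply means the event arrived while no waiter was registered, which is harmless for an instance still in building/spawning. A toy version of the register/dispatch pattern, not nova's actual event machinery:

```python
# Register-then-dispatch pattern behind the vif-plugged lines.
import threading

_waiters = {}          # (instance_uuid, event_name) -> threading.Event
_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    ev = threading.Event()
    with _lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev           # spawn path later calls ev.wait(timeout=...)


def dispatch_event(instance_uuid, event_name):
    with _lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        # Corresponds to the "Received unexpected event" WARNING above.
        print(f'WARNING: unexpected event {event_name} for {instance_uuid}')
    else:
        ev.set()        # unblocks the waiting spawn
```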
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.435259] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 717.435451] env[62204]: INFO nova.compute.manager [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Took 8.21 seconds to spawn the instance on the hypervisor. [ 717.435624] env[62204]: DEBUG nova.compute.manager [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 717.436467] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe52284d-839d-4a6f-a05d-f6fb05a5d904 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.464508] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Successfully updated port: 929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 717.571029] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.097s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.571029] env[62204]: ERROR nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c8168e4a-cf0f-419f-b869-ee73a1cf8aba, please check neutron logs for more information. 
[ 717.571029] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Traceback (most recent call last): [ 717.571029] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 717.571029] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self.driver.spawn(context, instance, image_meta, [ 717.571029] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 717.571029] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self._vmops.spawn(context, instance, image_meta, injected_files, [ 717.571029] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 717.571029] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] vm_ref = self.build_virtual_machine(instance, [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] vif_infos = vmwarevif.get_vif_info(self._session, [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] for vif in network_info: [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] return self._sync_wrapper(fn, *args, **kwargs) [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self.wait() [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self[:] = self._gt.wait() [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] return self._exit_event.wait() [ 717.571523] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] current.throw(*self._exc) [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] result = function(*args, **kwargs) [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] return func(*args, **kwargs) [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] raise e [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] nwinfo = self.network_api.allocate_for_instance( [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] created_port_ids = self._update_ports_for_instance( [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 717.571943] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] with excutils.save_and_reraise_exception(): [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] self.force_reraise() [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] raise self.value [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] updated_port = self._update_port( [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] _ensure_no_port_binding_failure(port) [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] raise exception.PortBindingFailed(port_id=port['id']) [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] nova.exception.PortBindingFailed: Binding failed for 
port c8168e4a-cf0f-419f-b869-ee73a1cf8aba, please check neutron logs for more information. [ 717.572326] env[62204]: ERROR nova.compute.manager [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] [ 717.572670] env[62204]: DEBUG nova.compute.utils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Binding failed for port c8168e4a-cf0f-419f-b869-ee73a1cf8aba, please check neutron logs for more information. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 717.576034] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.044s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.576034] env[62204]: INFO nova.compute.claims [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.577957] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Build of instance 6665383b-f5fd-4fdf-b625-86cfb0869419 was re-scheduled: Binding failed for port c8168e4a-cf0f-419f-b869-ee73a1cf8aba, please check neutron logs for more information. 
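The traceback bottoms out in _ensure_no_port_binding_failure: after updating the port, nova inspects the binding:vif_type that neutron reports and treats the sentinel value "binding_failed" as a hard failure for this host. A sketch of that check (the real code lives in nova/network/neutron.py and uses nova's exception classes):

```python
# Simplified form of the check that raised PortBindingFailed above.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f'Binding failed for port {port_id}, '
            'please check neutron logs for more information.')


def ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])
```

Because the exception escapes _build_and_run_instance, the compute aborts its resource claim (the abort_instance_claim lock lines above), cleans up the allocated networking, and hands the build back for re-scheduling on another host, which is exactly the "Build of instance ... was re-scheduled" line.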
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 717.578651] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Unplugging VIFs for instance {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 717.578932] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "refresh_cache-6665383b-f5fd-4fdf-b625-86cfb0869419" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.579440] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquired lock "refresh_cache-6665383b-f5fd-4fdf-b625-86cfb0869419" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.579671] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 717.793604] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199493, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.963268] env[62204]: INFO nova.compute.manager [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Took 27.83 seconds to build instance. [ 718.121176] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.293333] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199493, 'name': ReconfigVM_Task, 'duration_secs': 0.685042} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.293625] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 19326d9f-5f3a-4756-874f-d4d3ce25f8e8/19326d9f-5f3a-4756-874f-d4d3ce25f8e8.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.294294] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-138e50cf-6267-4b5c-9b53-98ae8fdd416d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.300962] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 718.300962] env[62204]: value = "task-1199494" [ 718.300962] env[62204]: _type = "Task" [ 718.300962] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.309346] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199494, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.315589] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.471245] env[62204]: DEBUG oslo_concurrency.lockutils [None req-36f027a7-5764-4495-8ed5-f2ee83f51223 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 136.372s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.818669] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199494, 'name': Rename_Task, 'duration_secs': 0.405524} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.818943] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 718.819411] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Releasing lock "refresh_cache-6665383b-f5fd-4fdf-b625-86cfb0869419" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.819606] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62204) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 718.819949] env[62204]: DEBUG nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 718.819949] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 718.821478] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cf13350-3d3c-4ed5-b02f-7bc388d9b8a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.830546] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 718.830546] env[62204]: value = "task-1199495" [ 718.830546] env[62204]: _type = "Task" [ 718.830546] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.838594] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199495, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.848702] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.974841] env[62204]: DEBUG nova.compute.manager [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 719.066851] env[62204]: DEBUG nova.compute.manager [req-603c5542-55cd-472c-85e7-6aaae1f98462 req-6f1ea508-5cf6-4e3b-b42f-e15d0676f0a5 service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Received event network-changed-ccf86a68-c525-4b8b-940f-b0a08f2d3831 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 719.066851] env[62204]: DEBUG nova.compute.manager [req-603c5542-55cd-472c-85e7-6aaae1f98462 req-6f1ea508-5cf6-4e3b-b42f-e15d0676f0a5 service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Refreshing instance network info cache due to event network-changed-ccf86a68-c525-4b8b-940f-b0a08f2d3831. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 719.067155] env[62204]: DEBUG oslo_concurrency.lockutils [req-603c5542-55cd-472c-85e7-6aaae1f98462 req-6f1ea508-5cf6-4e3b-b42f-e15d0676f0a5 service nova] Acquiring lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.067197] env[62204]: DEBUG oslo_concurrency.lockutils [req-603c5542-55cd-472c-85e7-6aaae1f98462 req-6f1ea508-5cf6-4e3b-b42f-e15d0676f0a5 service nova] Acquired lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.067356] env[62204]: DEBUG nova.network.neutron [req-603c5542-55cd-472c-85e7-6aaae1f98462 req-6f1ea508-5cf6-4e3b-b42f-e15d0676f0a5 service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Refreshing network info cache for port ccf86a68-c525-4b8b-940f-b0a08f2d3831 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 719.071021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b3584f-1c56-4652-b437-add3167705ea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.080236] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5575112f-194b-49d5-80cd-dc3bb067b579 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.119254] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f7b273-b6e9-40da-83fe-92b79e7b1270 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.129255] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb508f1-495b-43fa-8730-529a805aa2b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.144142] env[62204]: DEBUG nova.compute.provider_tree [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Inventory has not changed in 
ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 719.340968] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199495, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.352064] env[62204]: DEBUG nova.network.neutron [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.449829] env[62204]: DEBUG nova.compute.manager [req-a4e8c95d-ccc2-4e87-99b6-7e185f56c2eb req-a620c992-5bbf-4cee-b396-5bbd7514486d service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received event network-changed-929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 719.450173] env[62204]: DEBUG nova.compute.manager [req-a4e8c95d-ccc2-4e87-99b6-7e185f56c2eb req-a620c992-5bbf-4cee-b396-5bbd7514486d service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Refreshing instance network info cache due to event network-changed-929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 719.450371] env[62204]: DEBUG oslo_concurrency.lockutils [req-a4e8c95d-ccc2-4e87-99b6-7e185f56c2eb req-a620c992-5bbf-4cee-b396-5bbd7514486d service nova] Acquiring lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 719.450601] env[62204]: DEBUG oslo_concurrency.lockutils [req-a4e8c95d-ccc2-4e87-99b6-7e185f56c2eb req-a620c992-5bbf-4cee-b396-5bbd7514486d service nova] Acquired lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.450796] env[62204]: DEBUG nova.network.neutron [req-a4e8c95d-ccc2-4e87-99b6-7e185f56c2eb req-a620c992-5bbf-4cee-b396-5bbd7514486d service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Refreshing network info cache for port 929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 719.497041] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.617971] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Successfully updated port: 962bb0fb-5bee-480f-ae79-b9ba36189ba1 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.647639] env[62204]: DEBUG nova.scheduler.client.report 
[None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 719.842892] env[62204]: DEBUG oslo_vmware.api [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199495, 'name': PowerOnVM_Task, 'duration_secs': 0.704744} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.843519] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 719.843816] env[62204]: INFO nova.compute.manager [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Took 7.94 seconds to spawn the instance on the hypervisor. [ 719.844016] env[62204]: DEBUG nova.compute.manager [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 719.844800] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb7eb6f-e35f-431b-9ee3-b28269f86f06 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.856381] env[62204]: INFO nova.compute.manager [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 6665383b-f5fd-4fdf-b625-86cfb0869419] Took 1.04 seconds to deallocate network for instance. [ 719.872073] env[62204]: DEBUG nova.network.neutron [req-603c5542-55cd-472c-85e7-6aaae1f98462 req-6f1ea508-5cf6-4e3b-b42f-e15d0676f0a5 service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updated VIF entry in instance network info cache for port ccf86a68-c525-4b8b-940f-b0a08f2d3831. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 719.872505] env[62204]: DEBUG nova.network.neutron [req-603c5542-55cd-472c-85e7-6aaae1f98462 req-6f1ea508-5cf6-4e3b-b42f-e15d0676f0a5 service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [{"id": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "address": "fa:16:3e:b6:54:e6", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf86a68-c5", "ovs_interfaceid": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.016510] env[62204]: DEBUG nova.network.neutron [req-a4e8c95d-ccc2-4e87-99b6-7e185f56c2eb req-a620c992-5bbf-4cee-b396-5bbd7514486d service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 720.143155] env[62204]: DEBUG nova.network.neutron [req-a4e8c95d-ccc2-4e87-99b6-7e185f56c2eb req-a620c992-5bbf-4cee-b396-5bbd7514486d service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.152549] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.152549] env[62204]: DEBUG nova.compute.manager [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 720.156452] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.156s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.157902] env[62204]: INFO nova.compute.claims [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.375514] env[62204]: DEBUG oslo_concurrency.lockutils [req-603c5542-55cd-472c-85e7-6aaae1f98462 req-6f1ea508-5cf6-4e3b-b42f-e15d0676f0a5 service nova] Releasing lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.376168] env[62204]: INFO nova.compute.manager [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Took 28.05 seconds to build instance. [ 720.646603] env[62204]: DEBUG oslo_concurrency.lockutils [req-a4e8c95d-ccc2-4e87-99b6-7e185f56c2eb req-a620c992-5bbf-4cee-b396-5bbd7514486d service nova] Releasing lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 720.664886] env[62204]: DEBUG nova.compute.utils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 720.672085] env[62204]: DEBUG nova.compute.manager [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 720.672085] env[62204]: DEBUG nova.network.neutron [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 720.728348] env[62204]: DEBUG nova.policy [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbeceb600e314287919147317b6e57d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f3b9a6c4b7e48c2bf5e7b4323185934', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 720.877710] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48ed61e4-4b52-4b9e-8f84-9a20135cc864 tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.865s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 720.917394] env[62204]: INFO nova.scheduler.client.report [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Deleted allocations for instance 6665383b-f5fd-4fdf-b625-86cfb0869419 [ 721.086565] env[62204]: DEBUG nova.network.neutron [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Successfully created port: f0ae2eb6-f038-4347-b9bf-573e69ee42d2 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 721.171384] env[62204]: DEBUG nova.compute.manager [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 721.380313] env[62204]: DEBUG nova.compute.manager [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 721.429804] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a9c00148-4d58-43b4-966e-1ac0a83c30c9 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "6665383b-f5fd-4fdf-b625-86cfb0869419" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 142.349s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.662086] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8b0be2-c8d5-44a5-8081-08f35ceaf1fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.668140] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Successfully updated port: 4688967a-f972-4674-959a-9c23ad7c85d8 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.679970] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d463ff11-506d-4914-92a6-2f864e3002cd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.711223] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19cadf0-e1d8-4167-bd78-5062d3d4d1a7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.718758] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6129c6-2443-41ec-9ce5-fc2b610b88ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.734112] env[62204]: DEBUG nova.compute.provider_tree [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.740253] env[62204]: DEBUG nova.compute.manager [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received event network-vif-plugged-962bb0fb-5bee-480f-ae79-b9ba36189ba1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 721.740541] env[62204]: DEBUG oslo_concurrency.lockutils [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] Acquiring lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.740715] env[62204]: DEBUG oslo_concurrency.lockutils [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.740822] env[62204]: DEBUG oslo_concurrency.lockutils [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.741025] env[62204]: DEBUG nova.compute.manager [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] No waiting events found dispatching network-vif-plugged-962bb0fb-5bee-480f-ae79-b9ba36189ba1 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 721.741177] env[62204]: WARNING nova.compute.manager [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received unexpected event network-vif-plugged-962bb0fb-5bee-480f-ae79-b9ba36189ba1 for instance with vm_state building and task_state spawning. [ 721.741316] env[62204]: DEBUG nova.compute.manager [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received event network-changed-962bb0fb-5bee-480f-ae79-b9ba36189ba1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 721.741463] env[62204]: DEBUG nova.compute.manager [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Refreshing instance network info cache due to event network-changed-962bb0fb-5bee-480f-ae79-b9ba36189ba1. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 721.741638] env[62204]: DEBUG oslo_concurrency.lockutils [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] Acquiring lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.742480] env[62204]: DEBUG oslo_concurrency.lockutils [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] Acquired lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.742480] env[62204]: DEBUG nova.network.neutron [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Refreshing network info cache for port 962bb0fb-5bee-480f-ae79-b9ba36189ba1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 721.912766] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.935171] env[62204]: DEBUG nova.compute.manager [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 722.171969] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.185875] env[62204]: DEBUG nova.compute.manager [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 722.212542] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 722.212833] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 722.213015] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 722.213213] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 722.213360] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 722.213502] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 722.213705] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 722.213861] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 722.214030] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 722.214195] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 722.214362] env[62204]: DEBUG nova.virt.hardware [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 722.215270] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a006de3-f23a-47aa-9002-0e3d65dd957d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.223543] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98aad10e-6f65-4ad8-ae47-9dd8444ddf33 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.237723] env[62204]: DEBUG nova.scheduler.client.report [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 722.281128] env[62204]: DEBUG nova.network.neutron [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 722.368197] env[62204]: DEBUG nova.network.neutron [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.411929] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "48fe8f43-4ab9-41de-9b81-35b4438585ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.411929] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "48fe8f43-4ab9-41de-9b81-35b4438585ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.412206] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "48fe8f43-4ab9-41de-9b81-35b4438585ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.413030] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "48fe8f43-4ab9-41de-9b81-35b4438585ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.413030] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "48fe8f43-4ab9-41de-9b81-35b4438585ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.415028] env[62204]: INFO nova.compute.manager [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Terminating instance [ 722.420441] env[62204]: DEBUG nova.compute.manager [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 722.420441] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 722.420441] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5ae503-74a0-4df3-ab80-c88c6a3879d5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.429765] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 722.430092] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c15a59e-0725-466c-9070-6b9738fc1f96 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.442021] env[62204]: DEBUG oslo_vmware.api [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 722.442021] env[62204]: value = "task-1199496" [ 722.442021] env[62204]: _type = "Task" [ 722.442021] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.455565] env[62204]: DEBUG oslo_vmware.api [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199496, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.464105] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.746023] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.589s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.746023] env[62204]: DEBUG nova.compute.manager [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 722.747342] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.282s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.747673] env[62204]: DEBUG nova.objects.instance [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lazy-loading 'resources' on Instance uuid ce74983e-8347-425c-967a-6a78a7daa701 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 722.872336] env[62204]: DEBUG oslo_concurrency.lockutils [req-c1d51fac-7eb6-4eab-9daf-402fa284848c req-ac1b79ac-073f-43d9-856d-9720db3eff6c service nova] Releasing lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.872835] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquired lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.872987] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 722.951577] env[62204]: DEBUG oslo_vmware.api [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199496, 'name': PowerOffVM_Task, 'duration_secs': 0.189853} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.951853] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 722.952011] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 722.952272] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36759281-ed6d-46a3-8df4-0f5db0c8b72c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.976667] env[62204]: DEBUG oslo_concurrency.lockutils [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquiring lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.977098] env[62204]: DEBUG oslo_concurrency.lockutils [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.977237] env[62204]: DEBUG oslo_concurrency.lockutils [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquiring lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.977491] env[62204]: DEBUG oslo_concurrency.lockutils [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 722.977761] env[62204]: DEBUG oslo_concurrency.lockutils [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 722.981783] env[62204]: INFO nova.compute.manager [None 
req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Terminating instance [ 722.982093] env[62204]: DEBUG nova.compute.manager [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 722.982300] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 722.983553] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904be59b-3a0c-430d-ad32-228afa362a07 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.991484] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 722.991687] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8839879-5207-4749-a088-d724f55a567c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.997742] env[62204]: DEBUG oslo_vmware.api [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 722.997742] env[62204]: value = "task-1199498" [ 722.997742] env[62204]: _type = "Task" [ 722.997742] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.007577] env[62204]: DEBUG oslo_vmware.api [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199498, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.045987] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 723.046271] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 723.046609] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Deleting the datastore file [datastore1] 48fe8f43-4ab9-41de-9b81-35b4438585ea {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 723.046735] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5ee3557-7fff-4e3a-b889-fcf8915f083b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.053034] env[62204]: DEBUG oslo_vmware.api [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 723.053034] env[62204]: value = "task-1199499" [ 723.053034] env[62204]: _type = "Task" [ 723.053034] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.057352] env[62204]: DEBUG nova.network.neutron [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Successfully updated port: f0ae2eb6-f038-4347-b9bf-573e69ee42d2 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 723.065300] env[62204]: DEBUG oslo_vmware.api [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199499, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.080652] env[62204]: DEBUG nova.compute.manager [req-1c02fb65-8c63-4452-b4a7-9b5e57d49867 req-c6e7ace6-15b4-4f91-9055-bdab71855515 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Received event network-vif-plugged-f0ae2eb6-f038-4347-b9bf-573e69ee42d2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.080924] env[62204]: DEBUG oslo_concurrency.lockutils [req-1c02fb65-8c63-4452-b4a7-9b5e57d49867 req-c6e7ace6-15b4-4f91-9055-bdab71855515 service nova] Acquiring lock "12656a79-a836-452c-8f94-c8e142c9ec2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.081215] env[62204]: DEBUG oslo_concurrency.lockutils [req-1c02fb65-8c63-4452-b4a7-9b5e57d49867 req-c6e7ace6-15b4-4f91-9055-bdab71855515 service nova] Lock "12656a79-a836-452c-8f94-c8e142c9ec2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.081429] env[62204]: DEBUG oslo_concurrency.lockutils [req-1c02fb65-8c63-4452-b4a7-9b5e57d49867 req-c6e7ace6-15b4-4f91-9055-bdab71855515 service nova] Lock "12656a79-a836-452c-8f94-c8e142c9ec2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.082133] env[62204]: DEBUG nova.compute.manager [req-1c02fb65-8c63-4452-b4a7-9b5e57d49867 req-c6e7ace6-15b4-4f91-9055-bdab71855515 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] No waiting events found dispatching network-vif-plugged-f0ae2eb6-f038-4347-b9bf-573e69ee42d2 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 723.082133] env[62204]: WARNING nova.compute.manager [req-1c02fb65-8c63-4452-b4a7-9b5e57d49867 req-c6e7ace6-15b4-4f91-9055-bdab71855515 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Received unexpected event network-vif-plugged-f0ae2eb6-f038-4347-b9bf-573e69ee42d2 for instance with vm_state building and task_state spawning. [ 723.253450] env[62204]: DEBUG nova.compute.utils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 723.255022] env[62204]: DEBUG nova.compute.manager [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 723.255213] env[62204]: DEBUG nova.network.neutron [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 723.312054] env[62204]: DEBUG nova.policy [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '615cb7d57089428d88e7ba8a2b39b111', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96fd673209d940caa3186ee212162259', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 723.418132] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 723.507815] env[62204]: DEBUG oslo_vmware.api [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199498, 'name': PowerOffVM_Task, 'duration_secs': 0.208711} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.510238] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 723.510414] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 723.510832] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-154fcb26-aa7d-423b-9619-8176a9349ada {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.571028] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquiring lock "refresh_cache-12656a79-a836-452c-8f94-c8e142c9ec2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.571028] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquired lock "refresh_cache-12656a79-a836-452c-8f94-c8e142c9ec2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.571028] env[62204]: DEBUG nova.network.neutron [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 723.571028] env[62204]: DEBUG oslo_vmware.api [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199499, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.315159} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.571028] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 723.572108] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 723.572108] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 723.572108] env[62204]: INFO nova.compute.manager [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Took 1.15 seconds to destroy the instance on the hypervisor. [ 723.572108] env[62204]: DEBUG oslo.service.loopingcall [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 723.572108] env[62204]: DEBUG nova.compute.manager [-] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 723.572108] env[62204]: DEBUG nova.network.neutron [-] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 723.581876] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 723.581876] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 723.581876] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Deleting the datastore file [datastore2] 19326d9f-5f3a-4756-874f-d4d3ce25f8e8 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 723.583973] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4e9de5c-fa3f-4d31-b530-72eb63f7caec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.596022] env[62204]: DEBUG oslo_vmware.api [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for the task: (returnval){ [ 723.596022] env[62204]: value = "task-1199501" [ 723.596022] env[62204]: _type = "Task" [ 723.596022] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.604046] env[62204]: DEBUG oslo_vmware.api [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199501, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.668813] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205ac3e2-6350-4fe4-95e6-3a818912f6a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.678021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7282813-e109-495e-9bcd-2cb1914d1de7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.717310] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede4131a-4eb0-4bce-8ac9-4eaea4500a8e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.725684] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6cd19d-ccb4-47c6-ac11-e1c7d271b33d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.747026] env[62204]: DEBUG nova.compute.provider_tree [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.758548] env[62204]: DEBUG nova.compute.manager [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 723.817999] env[62204]: DEBUG nova.network.neutron [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Successfully created port: 70b30639-0d69-468e-b14e-204a65bc34d5 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 723.850771] env[62204]: DEBUG nova.compute.manager [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received event network-vif-plugged-4688967a-f972-4674-959a-9c23ad7c85d8 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.850771] env[62204]: DEBUG oslo_concurrency.lockutils [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] Acquiring lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.851033] env[62204]: DEBUG oslo_concurrency.lockutils [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.851754] env[62204]: DEBUG oslo_concurrency.lockutils [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.851754] env[62204]: DEBUG nova.compute.manager [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] No waiting events found dispatching network-vif-plugged-4688967a-f972-4674-959a-9c23ad7c85d8 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 723.851754] env[62204]: WARNING nova.compute.manager [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received unexpected event network-vif-plugged-4688967a-f972-4674-959a-9c23ad7c85d8 for instance with vm_state building and task_state spawning. [ 723.851754] env[62204]: DEBUG nova.compute.manager [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received event network-changed-4688967a-f972-4674-959a-9c23ad7c85d8 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.851754] env[62204]: DEBUG nova.compute.manager [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Refreshing instance network info cache due to event network-changed-4688967a-f972-4674-959a-9c23ad7c85d8. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 723.852035] env[62204]: DEBUG oslo_concurrency.lockutils [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] Acquiring lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.104839] env[62204]: DEBUG oslo_vmware.api [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Task: {'id': task-1199501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.434125} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.105188] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 724.105329] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 724.105502] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 724.105680] env[62204]: INFO nova.compute.manager [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 724.106031] env[62204]: DEBUG oslo.service.loopingcall [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.106153] env[62204]: DEBUG nova.compute.manager [-] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 724.106253] env[62204]: DEBUG nova.network.neutron [-] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 724.141436] env[62204]: DEBUG nova.network.neutron [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.182256] env[62204]: DEBUG nova.network.neutron [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Updating instance_info_cache with network_info: [{"id": "929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd", "address": "fa:16:3e:da:dc:ea", "network": {"id": "fad8b6a5-be09-4c56-8c46-ab6bf89595c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1722406786", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap929b6a1e-1d", "ovs_interfaceid": "929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "962bb0fb-5bee-480f-ae79-b9ba36189ba1", "address": "fa:16:3e:06:6c:48", "network": {"id": "956c5f5f-4fa4-45e7-86c3-29574bedc6bb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-823685576", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap962bb0fb-5b", "ovs_interfaceid": "962bb0fb-5bee-480f-ae79-b9ba36189ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4688967a-f972-4674-959a-9c23ad7c85d8", "address": "fa:16:3e:92:1c:fc", "network": {"id": "fad8b6a5-be09-4c56-8c46-ab6bf89595c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1722406786", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", 
"external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4688967a-f9", "ovs_interfaceid": "4688967a-f972-4674-959a-9c23ad7c85d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.250921] env[62204]: DEBUG nova.scheduler.client.report [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 724.465882] env[62204]: DEBUG nova.network.neutron [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Updating instance_info_cache with network_info: [{"id": "f0ae2eb6-f038-4347-b9bf-573e69ee42d2", "address": "fa:16:3e:d0:ed:a1", "network": {"id": "52af7b22-7224-43af-ae54-ea0374fed3b6", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1070359216-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3b9a6c4b7e48c2bf5e7b4323185934", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ae2eb6-f0", "ovs_interfaceid": "f0ae2eb6-f038-4347-b9bf-573e69ee42d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.660944] env[62204]: DEBUG nova.network.neutron [-] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.684969] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Releasing lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.685531] env[62204]: DEBUG nova.compute.manager [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 
tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Instance network_info: |[{"id": "929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd", "address": "fa:16:3e:da:dc:ea", "network": {"id": "fad8b6a5-be09-4c56-8c46-ab6bf89595c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1722406786", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap929b6a1e-1d", "ovs_interfaceid": "929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "962bb0fb-5bee-480f-ae79-b9ba36189ba1", "address": "fa:16:3e:06:6c:48", "network": {"id": "956c5f5f-4fa4-45e7-86c3-29574bedc6bb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-823685576", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap962bb0fb-5b", "ovs_interfaceid": "962bb0fb-5bee-480f-ae79-b9ba36189ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4688967a-f972-4674-959a-9c23ad7c85d8", "address": "fa:16:3e:92:1c:fc", "network": {"id": "fad8b6a5-be09-4c56-8c46-ab6bf89595c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1722406786", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4688967a-f9", "ovs_interfaceid": "4688967a-f972-4674-959a-9c23ad7c85d8", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 724.685944] env[62204]: DEBUG oslo_concurrency.lockutils [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] Acquired lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.686239] env[62204]: DEBUG nova.network.neutron [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Refreshing network info cache for port 4688967a-f972-4674-959a-9c23ad7c85d8 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 724.687724] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:dc:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:6c:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bab6a6c3-1c5c-4776-b21b-dec21196d702', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '962bb0fb-5bee-480f-ae79-b9ba36189ba1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:1c:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '111a2767-1b06-4fe5-852b-40c9b5a699fd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4688967a-f972-4674-959a-9c23ad7c85d8', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 724.705093] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Creating folder: Project (fc9c47a4209c4f158e39dd04afd17fa5). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.709340] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-112d9907-9844-4953-bcb7-3a14603a6154 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.721856] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Created folder: Project (fc9c47a4209c4f158e39dd04afd17fa5) in parent group-v259933. [ 724.722161] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Creating folder: Instances. Parent ref: group-v259969. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.722495] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-883f2cb9-8800-44ec-93c1-5dc5a3ccf9a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.732264] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Created folder: Instances in parent group-v259969. [ 724.732594] env[62204]: DEBUG oslo.service.loopingcall [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.732867] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 724.733182] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a06d141c-6034-4592-8ede-f8fb5f55b95e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.760383] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.013s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.762911] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.715s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.763758] env[62204]: INFO nova.compute.claims [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.772330] env[62204]: DEBUG nova.compute.manager [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 724.775016] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 724.775016] env[62204]: value = "task-1199504" [ 724.775016] env[62204]: _type = "Task" [ 724.775016] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.784193] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199504, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.785557] env[62204]: INFO nova.scheduler.client.report [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Deleted allocations for instance ce74983e-8347-425c-967a-6a78a7daa701 [ 724.803600] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 724.803876] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.804056] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.804745] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.804745] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.804745] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.804745] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 724.805040] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.805040] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.805223] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.805402] env[62204]: DEBUG nova.virt.hardware [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.806247] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e5fe08-07b7-47c7-9e2a-fcc7ca882fa7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.815387] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d99844-e25e-484b-8da6-89f53dd4379e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.883161] env[62204]: DEBUG nova.network.neutron [-] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.969507] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Releasing lock "refresh_cache-12656a79-a836-452c-8f94-c8e142c9ec2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.970033] env[62204]: DEBUG nova.compute.manager [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Instance network_info: |[{"id": "f0ae2eb6-f038-4347-b9bf-573e69ee42d2", "address": "fa:16:3e:d0:ed:a1", "network": {"id": "52af7b22-7224-43af-ae54-ea0374fed3b6", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1070359216-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"3f3b9a6c4b7e48c2bf5e7b4323185934", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ae2eb6-f0", "ovs_interfaceid": "f0ae2eb6-f038-4347-b9bf-573e69ee42d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 724.970462] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:ed:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f0ae2eb6-f038-4347-b9bf-573e69ee42d2', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 724.977897] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Creating folder: Project (3f3b9a6c4b7e48c2bf5e7b4323185934). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.978264] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6d70431-7b8c-4cfe-ac2f-ffc4c613c0e7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.988871] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Created folder: Project (3f3b9a6c4b7e48c2bf5e7b4323185934) in parent group-v259933. [ 724.989056] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Creating folder: Instances. Parent ref: group-v259972. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 724.989293] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62655bf2-c3d4-41d2-9cc9-50cdbb2c04f1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.998570] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Created folder: Instances in parent group-v259972. [ 724.998785] env[62204]: DEBUG oslo.service.loopingcall [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.998978] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 724.999177] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-689de81f-0a91-491e-b22b-b06ee390052c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.020160] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.020160] env[62204]: value = "task-1199507" [ 725.020160] env[62204]: _type = "Task" [ 725.020160] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.027853] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199507, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.168493] env[62204]: INFO nova.compute.manager [-] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Took 1.59 seconds to deallocate network for instance. [ 725.281589] env[62204]: DEBUG nova.compute.manager [req-a53fce8d-49b1-4403-8e54-bd05a75277d7 req-f7ee51ee-e08b-4ee6-8a0a-6a8ea9f60605 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Received event network-changed-f0ae2eb6-f038-4347-b9bf-573e69ee42d2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.281733] env[62204]: DEBUG nova.compute.manager [req-a53fce8d-49b1-4403-8e54-bd05a75277d7 req-f7ee51ee-e08b-4ee6-8a0a-6a8ea9f60605 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Refreshing instance network info cache due to event network-changed-f0ae2eb6-f038-4347-b9bf-573e69ee42d2. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 725.281942] env[62204]: DEBUG oslo_concurrency.lockutils [req-a53fce8d-49b1-4403-8e54-bd05a75277d7 req-f7ee51ee-e08b-4ee6-8a0a-6a8ea9f60605 service nova] Acquiring lock "refresh_cache-12656a79-a836-452c-8f94-c8e142c9ec2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.283382] env[62204]: DEBUG oslo_concurrency.lockutils [req-a53fce8d-49b1-4403-8e54-bd05a75277d7 req-f7ee51ee-e08b-4ee6-8a0a-6a8ea9f60605 service nova] Acquired lock "refresh_cache-12656a79-a836-452c-8f94-c8e142c9ec2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.283599] env[62204]: DEBUG nova.network.neutron [req-a53fce8d-49b1-4403-8e54-bd05a75277d7 req-f7ee51ee-e08b-4ee6-8a0a-6a8ea9f60605 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Refreshing network info cache for port f0ae2eb6-f038-4347-b9bf-573e69ee42d2 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 725.285303] env[62204]: DEBUG nova.network.neutron [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Updated VIF entry in instance network info cache for port 4688967a-f972-4674-959a-9c23ad7c85d8. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 725.285722] env[62204]: DEBUG nova.network.neutron [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Updating instance_info_cache with network_info: [{"id": "929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd", "address": "fa:16:3e:da:dc:ea", "network": {"id": "fad8b6a5-be09-4c56-8c46-ab6bf89595c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1722406786", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap929b6a1e-1d", "ovs_interfaceid": "929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "962bb0fb-5bee-480f-ae79-b9ba36189ba1", "address": "fa:16:3e:06:6c:48", "network": {"id": "956c5f5f-4fa4-45e7-86c3-29574bedc6bb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-823685576", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap962bb0fb-5b", "ovs_interfaceid": "962bb0fb-5bee-480f-ae79-b9ba36189ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4688967a-f972-4674-959a-9c23ad7c85d8", "address": "fa:16:3e:92:1c:fc", "network": {"id": "fad8b6a5-be09-4c56-8c46-ab6bf89595c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1722406786", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", 
"segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4688967a-f9", "ovs_interfaceid": "4688967a-f972-4674-959a-9c23ad7c85d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.295447] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199504, 'name': CreateVM_Task, 'duration_secs': 0.46106} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.295866] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b4f43a2c-d3e3-4179-a344-b59975e2673f tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "ce74983e-8347-425c-967a-6a78a7daa701" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.586s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.297361] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 725.298475] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.298726] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.298957] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 725.299427] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2785ffca-f6db-4a1a-8d34-e4a0b31910ae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.304329] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 725.304329] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52421723-3c09-4fb6-43f8-327493703240" [ 725.304329] env[62204]: _type = "Task" [ 725.304329] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.312977] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52421723-3c09-4fb6-43f8-327493703240, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.385860] env[62204]: INFO nova.compute.manager [-] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Took 1.28 seconds to deallocate network for instance. [ 725.532236] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199507, 'name': CreateVM_Task, 'duration_secs': 0.358743} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.532413] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 725.533097] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.676262] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.791988] env[62204]: DEBUG oslo_concurrency.lockutils [req-3400529f-818f-49cd-a94e-599badf1bb4f req-b91ccb0d-3455-45b3-8817-89e47ab3c97e service nova] Releasing lock "refresh_cache-b0180c2b-8edf-4d15-8d12-c754b73f6030" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.817509] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52421723-3c09-4fb6-43f8-327493703240, 'name': SearchDatastore_Task, 'duration_secs': 0.009304} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.817793] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.818083] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 725.818362] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.818541] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.818723] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 725.819072] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.819408] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 725.819661] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-529bf94b-44b6-4207-aa58-4239b601f40e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.821630] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-566a3fb2-117f-4168-80d9-d83183608ee1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.832021] env[62204]: DEBUG oslo_vmware.api [None 
req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 725.832021] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52aea79f-0260-4056-d3f0-7c500b1b9dc1" [ 725.832021] env[62204]: _type = "Task" [ 725.832021] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.835791] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 725.835791] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 725.835791] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf99ffd-80d7-4ac4-93e5-4b68d1cecd6a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.839272] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52aea79f-0260-4056-d3f0-7c500b1b9dc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.842213] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 725.842213] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52491ef5-1453-d7f0-5730-8c9159187f8e" [ 725.842213] env[62204]: _type = "Task" [ 725.842213] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.853899] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52491ef5-1453-d7f0-5730-8c9159187f8e, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.854650] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2a40c6a-b878-458c-839e-b23614a970c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.861999] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 725.861999] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52af072c-a324-c20d-d2b7-6748436cd957" [ 725.861999] env[62204]: _type = "Task" [ 725.861999] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.877997] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52af072c-a324-c20d-d2b7-6748436cd957, 'name': SearchDatastore_Task, 'duration_secs': 0.009032} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.881475] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.881880] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] b0180c2b-8edf-4d15-8d12-c754b73f6030/b0180c2b-8edf-4d15-8d12-c754b73f6030.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 725.882555] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4786d176-0e01-428b-a93f-669e751af443 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.889081] env[62204]: DEBUG nova.compute.manager [req-105fbb51-34d3-4366-b433-62d241061d03 req-ca7f6eb2-47a9-4911-8012-2f4e1144be7f service nova] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Received event network-vif-deleted-ba160e15-24d0-4e35-af63-89849f63afca {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.889145] env[62204]: DEBUG nova.compute.manager [req-105fbb51-34d3-4366-b433-62d241061d03 req-ca7f6eb2-47a9-4911-8012-2f4e1144be7f service nova] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Received event network-vif-deleted-a71dff43-7907-4305-b9cc-260d439fcaab {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 725.890929] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 725.890929] 
env[62204]: value = "task-1199508" [ 725.890929] env[62204]: _type = "Task" [ 725.890929] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.891721] env[62204]: DEBUG oslo_concurrency.lockutils [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.903285] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199508, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.029944] env[62204]: DEBUG nova.network.neutron [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Successfully updated port: 70b30639-0d69-468e-b14e-204a65bc34d5 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 726.123511] env[62204]: DEBUG nova.network.neutron [req-a53fce8d-49b1-4403-8e54-bd05a75277d7 req-f7ee51ee-e08b-4ee6-8a0a-6a8ea9f60605 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Updated VIF entry in instance network info cache for port f0ae2eb6-f038-4347-b9bf-573e69ee42d2. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 726.123925] env[62204]: DEBUG nova.network.neutron [req-a53fce8d-49b1-4403-8e54-bd05a75277d7 req-f7ee51ee-e08b-4ee6-8a0a-6a8ea9f60605 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Updating instance_info_cache with network_info: [{"id": "f0ae2eb6-f038-4347-b9bf-573e69ee42d2", "address": "fa:16:3e:d0:ed:a1", "network": {"id": "52af7b22-7224-43af-ae54-ea0374fed3b6", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1070359216-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f3b9a6c4b7e48c2bf5e7b4323185934", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf0ae2eb6-f0", "ovs_interfaceid": "f0ae2eb6-f038-4347-b9bf-573e69ee42d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.234822] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b1e801-5ef0-49a6-80ac-44bb6fd2a00c {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.246383] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f0fb84-fa96-440f-9243-292f2361a3f6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.291369] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec684c4-6a1f-46b6-9033-589e42fce3d3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.300902] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a36705c-352e-46c2-b131-89833c64b892 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.317123] env[62204]: DEBUG nova.compute.provider_tree [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 726.342070] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52aea79f-0260-4056-d3f0-7c500b1b9dc1, 'name': SearchDatastore_Task, 'duration_secs': 0.017099} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.342519] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.342760] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.343104] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.343275] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.343474] env[62204]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.343740] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-459f5416-7361-480f-8d0f-c11f89f8b57a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.351987] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.352198] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 726.352919] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af1cf2e3-125b-4f1a-a2e4-3022bca83640 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.357838] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 726.357838] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260b2e9-6690-baea-6bfb-589f5f6d48b6" [ 726.357838] env[62204]: _type = "Task" [ 726.357838] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.365295] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260b2e9-6690-baea-6bfb-589f5f6d48b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.401875] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480022} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.402154] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] b0180c2b-8edf-4d15-8d12-c754b73f6030/b0180c2b-8edf-4d15-8d12-c754b73f6030.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 726.402367] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 726.402605] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43e7ee17-00d0-4c35-8ba0-ad81bd0bda1d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.408811] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 726.408811] env[62204]: value = "task-1199509" [ 726.408811] env[62204]: _type = "Task" [ 726.408811] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.416867] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199509, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.533967] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquiring lock "refresh_cache-258ec37d-c791-4c43-8725-0f4b4bbf9b5b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.534165] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquired lock "refresh_cache-258ec37d-c791-4c43-8725-0f4b4bbf9b5b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.534347] env[62204]: DEBUG nova.network.neutron [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 726.627296] env[62204]: DEBUG oslo_concurrency.lockutils [req-a53fce8d-49b1-4403-8e54-bd05a75277d7 req-f7ee51ee-e08b-4ee6-8a0a-6a8ea9f60605 service nova] Releasing lock "refresh_cache-12656a79-a836-452c-8f94-c8e142c9ec2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.672042] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "7c21539c-35fa-4f58-beb0-e965ffaf79af" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.672171] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "7c21539c-35fa-4f58-beb0-e965ffaf79af" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.672379] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "7c21539c-35fa-4f58-beb0-e965ffaf79af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.672557] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "7c21539c-35fa-4f58-beb0-e965ffaf79af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 726.672713] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 
tempest-ServerShowV247Test-1079756337-project-member] Lock "7c21539c-35fa-4f58-beb0-e965ffaf79af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.675113] env[62204]: INFO nova.compute.manager [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Terminating instance [ 726.676782] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "refresh_cache-7c21539c-35fa-4f58-beb0-e965ffaf79af" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 726.676935] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquired lock "refresh_cache-7c21539c-35fa-4f58-beb0-e965ffaf79af" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.677118] env[62204]: DEBUG nova.network.neutron [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 726.820393] env[62204]: DEBUG nova.scheduler.client.report [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 726.868475] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260b2e9-6690-baea-6bfb-589f5f6d48b6, 'name': SearchDatastore_Task, 'duration_secs': 0.007949} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.869337] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d86b4978-aac8-4fb6-8d0c-e8e2c3b1eda8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.874801] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 726.874801] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c4b7f1-dcec-4fa0-b044-3fe9d0eda7b8" [ 726.874801] env[62204]: _type = "Task" [ 726.874801] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.882214] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c4b7f1-dcec-4fa0-b044-3fe9d0eda7b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.918236] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096873} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.918506] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.919255] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063a42ee-3642-4692-88d3-fb380e0fb8c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.946146] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] b0180c2b-8edf-4d15-8d12-c754b73f6030/b0180c2b-8edf-4d15-8d12-c754b73f6030.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.946609] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc4d5a84-6406-40dd-892a-5006a93f3372 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.966152] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 726.966152] env[62204]: value = "task-1199510" [ 726.966152] env[62204]: _type = "Task" [ 726.966152] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.974100] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199510, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.063515] env[62204]: DEBUG nova.network.neutron [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 727.197102] env[62204]: DEBUG nova.network.neutron [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 727.216231] env[62204]: DEBUG nova.network.neutron [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Updating instance_info_cache with network_info: [{"id": "70b30639-0d69-468e-b14e-204a65bc34d5", "address": "fa:16:3e:f4:ca:46", "network": {"id": "c859c0fe-cab5-4cc8-b844-0795096eefbc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2fe8616669064856bebe874898c69d6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfbfc55d-8126-40dd-998e-8600ea92f97c", "external-id": "nsx-vlan-transportzone-650", "segmentation_id": 650, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70b30639-0d", "ovs_interfaceid": "70b30639-0d69-468e-b14e-204a65bc34d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.263951] env[62204]: DEBUG nova.network.neutron [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.303809] env[62204]: DEBUG nova.compute.manager [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Received event network-vif-plugged-70b30639-0d69-468e-b14e-204a65bc34d5 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.304037] env[62204]: DEBUG 
oslo_concurrency.lockutils [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] Acquiring lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 727.304245] env[62204]: DEBUG oslo_concurrency.lockutils [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] Lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.304412] env[62204]: DEBUG oslo_concurrency.lockutils [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] Lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.304576] env[62204]: DEBUG nova.compute.manager [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] No waiting events found dispatching network-vif-plugged-70b30639-0d69-468e-b14e-204a65bc34d5 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 727.304738] env[62204]: WARNING nova.compute.manager [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Received unexpected event network-vif-plugged-70b30639-0d69-468e-b14e-204a65bc34d5 for instance with vm_state building and task_state spawning. [ 727.304894] env[62204]: DEBUG nova.compute.manager [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Received event network-changed-70b30639-0d69-468e-b14e-204a65bc34d5 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 727.305240] env[62204]: DEBUG nova.compute.manager [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Refreshing instance network info cache due to event network-changed-70b30639-0d69-468e-b14e-204a65bc34d5. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 727.305431] env[62204]: DEBUG oslo_concurrency.lockutils [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] Acquiring lock "refresh_cache-258ec37d-c791-4c43-8725-0f4b4bbf9b5b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 727.325748] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.326208] env[62204]: DEBUG nova.compute.manager [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 727.328573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.464s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 727.328760] env[62204]: DEBUG nova.objects.instance [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62204) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 727.385569] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c4b7f1-dcec-4fa0-b044-3fe9d0eda7b8, 'name': SearchDatastore_Task, 'duration_secs': 0.009143} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.385837] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.386137] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 12656a79-a836-452c-8f94-c8e142c9ec2f/12656a79-a836-452c-8f94-c8e142c9ec2f.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 727.386402] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-153dda47-8a36-46b3-ba5a-0d8938664ab9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.393194] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 727.393194] env[62204]: value = "task-1199511" [ 727.393194] env[62204]: _type = "Task" [ 727.393194] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.401155] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199511, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.476190] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199510, 'name': ReconfigVM_Task, 'duration_secs': 0.27124} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.479033] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Reconfigured VM instance instance-0000002c to attach disk [datastore2] b0180c2b-8edf-4d15-8d12-c754b73f6030/b0180c2b-8edf-4d15-8d12-c754b73f6030.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 727.479033] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e5b8b8e-3a2a-4808-9a7b-7f7f7e154812 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.483034] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 727.483034] env[62204]: value = "task-1199512" [ 727.483034] env[62204]: _type = "Task" [ 727.483034] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.491119] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199512, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.722454] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Releasing lock "refresh_cache-258ec37d-c791-4c43-8725-0f4b4bbf9b5b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.722454] env[62204]: DEBUG nova.compute.manager [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Instance network_info: |[{"id": "70b30639-0d69-468e-b14e-204a65bc34d5", "address": "fa:16:3e:f4:ca:46", "network": {"id": "c859c0fe-cab5-4cc8-b844-0795096eefbc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2fe8616669064856bebe874898c69d6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfbfc55d-8126-40dd-998e-8600ea92f97c", "external-id": "nsx-vlan-transportzone-650", "segmentation_id": 650, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70b30639-0d", "ovs_interfaceid": "70b30639-0d69-468e-b14e-204a65bc34d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 727.722454] env[62204]: DEBUG oslo_concurrency.lockutils [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] Acquired lock "refresh_cache-258ec37d-c791-4c43-8725-0f4b4bbf9b5b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.722749] env[62204]: DEBUG nova.network.neutron [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Refreshing network info cache for port 70b30639-0d69-468e-b14e-204a65bc34d5 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 727.724221] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:ca:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfbfc55d-8126-40dd-998e-8600ea92f97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70b30639-0d69-468e-b14e-204a65bc34d5', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.733322] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Creating folder: Project (96fd673209d940caa3186ee212162259). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.735174] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 727.735290] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a4b3066-5af7-4855-94e2-40a1b33caa38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.737239] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 727.752272] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Created folder: Project (96fd673209d940caa3186ee212162259) in parent group-v259933. [ 727.752551] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Creating folder: Instances. Parent ref: group-v259975. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.752834] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14bad961-49f5-49e5-b88c-3f76e77f2351 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.766739] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Releasing lock "refresh_cache-7c21539c-35fa-4f58-beb0-e965ffaf79af" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 727.767349] env[62204]: DEBUG nova.compute.manager [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 727.767724] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.771050] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ba8eaf-dd7b-42a0-a022-c86dba9789b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.776202] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Created folder: Instances in parent group-v259975. [ 727.776785] env[62204]: DEBUG oslo.service.loopingcall [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.776785] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 727.777294] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-511d3ddd-7074-4197-bfe1-e595684654a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.794323] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 727.794996] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72ed5606-0e3b-4cd6-a8ee-1042a7aefddf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.800513] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.800513] env[62204]: value = "task-1199515" [ 727.800513] env[62204]: _type = "Task" [ 727.800513] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.805251] env[62204]: DEBUG oslo_vmware.api [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 727.805251] env[62204]: value = "task-1199516" [ 727.805251] env[62204]: _type = "Task" [ 727.805251] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.811888] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199515, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.819138] env[62204]: DEBUG oslo_vmware.api [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199516, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.836343] env[62204]: DEBUG nova.compute.utils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 727.837656] env[62204]: DEBUG nova.compute.manager [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 727.837830] env[62204]: DEBUG nova.network.neutron [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 727.902177] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199511, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471175} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.902461] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 12656a79-a836-452c-8f94-c8e142c9ec2f/12656a79-a836-452c-8f94-c8e142c9ec2f.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 727.902664] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 727.902915] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7f7bb3a-a1fb-4199-bf62-6b7090f24d30 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.907574] env[62204]: DEBUG nova.policy [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7aa5307c531e4703b2990f562e975571', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2955f14089a84d1fabafb933cd7bec04', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 727.911016] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 727.911016] env[62204]: value = "task-1199517" [ 727.911016] env[62204]: _type = "Task" [ 727.911016] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.918705] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199517, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.993205] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199512, 'name': Rename_Task, 'duration_secs': 0.152239} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.993569] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 727.993898] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-116993b9-9fbd-4c20-a21c-088c2aa1758c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.001180] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 728.001180] env[62204]: value = "task-1199518" [ 728.001180] env[62204]: _type = "Task" [ 728.001180] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.008990] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199518, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.247536] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 728.247723] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Starting heal instance info cache {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 728.247845] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Rebuilding the list of instances to heal {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 728.313866] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199515, 'name': CreateVM_Task, 'duration_secs': 0.352172} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.316711] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 728.316986] env[62204]: DEBUG oslo_vmware.api [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199516, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.317634] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.317792] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.318112] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 728.318350] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c9ee9ce-1ea3-4a2a-b870-81e5087ba484 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.322522] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 728.322522] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52319afc-759b-fe89-d4a1-78f2077d336c" [ 728.322522] env[62204]: _type = "Task" [ 728.322522] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.329699] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52319afc-759b-fe89-d4a1-78f2077d336c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.332161] env[62204]: DEBUG nova.network.neutron [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Successfully created port: 0bda5859-fbaf-4c89-84b6-e50ec57665a5 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.340829] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d870c8c-e727-47c8-9bd7-661fb67cc121 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.341910] env[62204]: DEBUG nova.compute.manager [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 728.344193] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.867s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.344401] env[62204]: DEBUG nova.objects.instance [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lazy-loading 'resources' on Instance uuid bcb11a72-4394-42a2-9a9f-295adc1abcd0 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 728.423792] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064509} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.424128] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.425999] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4887db-8bc4-4a7d-a2a4-364bb083431a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.459345] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] 12656a79-a836-452c-8f94-c8e142c9ec2f/12656a79-a836-452c-8f94-c8e142c9ec2f.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.461022] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76667b8a-7fbf-47ea-abd8-a356f8fc719a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.482136] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 728.482136] env[62204]: value = "task-1199519" [ 728.482136] env[62204]: _type = "Task" [ 728.482136] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.490260] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199519, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.510020] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199518, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.697205] env[62204]: DEBUG nova.network.neutron [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Updated VIF entry in instance network info cache for port 70b30639-0d69-468e-b14e-204a65bc34d5. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 728.697658] env[62204]: DEBUG nova.network.neutron [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Updating instance_info_cache with network_info: [{"id": "70b30639-0d69-468e-b14e-204a65bc34d5", "address": "fa:16:3e:f4:ca:46", "network": {"id": "c859c0fe-cab5-4cc8-b844-0795096eefbc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.105", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2fe8616669064856bebe874898c69d6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfbfc55d-8126-40dd-998e-8600ea92f97c", "external-id": "nsx-vlan-transportzone-650", "segmentation_id": 650, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70b30639-0d", "ovs_interfaceid": "70b30639-0d69-468e-b14e-204a65bc34d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.754224] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Skipping network cache update for instance because it is being deleted. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 728.754525] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 728.754725] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 728.754987] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 728.755195] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Skipping network cache update for instance because it is Building. 
{{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 728.789599] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "refresh_cache-bcb11a72-4394-42a2-9a9f-295adc1abcd0" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.789599] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquired lock "refresh_cache-bcb11a72-4394-42a2-9a9f-295adc1abcd0" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.789599] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Forcefully refreshing network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 728.789599] env[62204]: DEBUG nova.objects.instance [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lazy-loading 'info_cache' on Instance uuid bcb11a72-4394-42a2-9a9f-295adc1abcd0 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 728.814619] env[62204]: DEBUG oslo_vmware.api [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199516, 'name': PowerOffVM_Task, 'duration_secs': 0.963795} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.814842] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 728.815051] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 728.815354] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e590124-2ecc-4b70-aea6-f45393cf87a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.833940] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52319afc-759b-fe89-d4a1-78f2077d336c, 'name': SearchDatastore_Task, 'duration_secs': 0.01011} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.834363] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.834631] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 728.834922] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.835188] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.835419] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 728.836056] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4a9d273-4295-48ad-8ce6-e307481391f8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.840313] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 728.840505] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 728.840676] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Deleting the datastore file [datastore2] 7c21539c-35fa-4f58-beb0-e965ffaf79af {{(pid=62204) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 728.840898] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42f678b7-6759-446b-973a-2b70f6d30870 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.844442] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 728.844609] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 728.845318] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40df3b60-ab67-43e0-9264-d8f1fc87d8f7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.855128] env[62204]: DEBUG oslo_vmware.api [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for the task: (returnval){ [ 728.855128] env[62204]: value = "task-1199521" [ 728.855128] env[62204]: _type = "Task" [ 728.855128] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.860897] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 728.860897] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bac80c-c26c-e22c-46d5-4ea2a861c96c" [ 728.860897] env[62204]: _type = "Task" [ 728.860897] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.870086] env[62204]: DEBUG oslo_vmware.api [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199521, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.876235] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bac80c-c26c-e22c-46d5-4ea2a861c96c, 'name': SearchDatastore_Task, 'duration_secs': 0.007674} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.877191] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-becfd12a-1d2d-49e4-aea0-3a3ee51828d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.882734] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 728.882734] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525b948d-fcaa-bc69-0d40-e29f3af49c06" [ 728.882734] env[62204]: _type = "Task" [ 728.882734] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.891985] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525b948d-fcaa-bc69-0d40-e29f3af49c06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.992237] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199519, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.016086] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199518, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.200438] env[62204]: DEBUG oslo_concurrency.lockutils [req-d5e03939-d302-4225-b1dc-a609ddfebaca req-59f260c3-fb7d-41e8-b23d-a7ee603d8dff service nova] Releasing lock "refresh_cache-258ec37d-c791-4c43-8725-0f4b4bbf9b5b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.299775] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ea14d3-fc33-40d6-8f31-79e2b493cde4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.307963] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b40c05-db81-4106-9ca1-2212fef09aa1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.339062] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264cbe72-f19d-43fc-945e-cdb4ba85d781 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.346697] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e65c3e-64b1-4a93-9a07-2036d7436c98 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.360250] env[62204]: DEBUG nova.compute.manager [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 729.362753] env[62204]: DEBUG nova.compute.provider_tree [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.372436] env[62204]: DEBUG oslo_vmware.api [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Task: {'id': task-1199521, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100969} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.373266] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.373558] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.373613] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.373777] env[62204]: INFO nova.compute.manager [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Took 1.61 seconds to destroy the instance on the hypervisor. [ 729.374100] env[62204]: DEBUG oslo.service.loopingcall [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 729.374224] env[62204]: DEBUG nova.compute.manager [-] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 729.374318] env[62204]: DEBUG nova.network.neutron [-] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 729.389749] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 729.390056] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 
tempest-ImagesNegativeTestJSON-1430580921-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 729.390276] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 729.390528] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 729.390751] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 729.390943] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 729.391215] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 729.391385] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 729.391554] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 729.391714] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 729.391884] env[62204]: DEBUG nova.virt.hardware [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 729.392856] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c7cb7f-8b0f-45ee-b51e-55a1a49e5af9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.396125] env[62204]: DEBUG nova.network.neutron [-] [instance: 
7c21539c-35fa-4f58-beb0-e965ffaf79af] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.400282] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525b948d-fcaa-bc69-0d40-e29f3af49c06, 'name': SearchDatastore_Task, 'duration_secs': 0.008606} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.400987] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 729.401247] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 258ec37d-c791-4c43-8725-0f4b4bbf9b5b/258ec37d-c791-4c43-8725-0f4b4bbf9b5b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 729.401509] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4ac2c327-2747-4f10-ae72-d852325d2476 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.406895] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f201bea-dc25-45e7-9069-cc32ffe0dad6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.411940] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 729.411940] env[62204]: value = "task-1199522" [ 729.411940] env[62204]: _type = "Task" [ 729.411940] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.429232] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199522, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.493292] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199519, 'name': ReconfigVM_Task, 'duration_secs': 0.532098} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.493648] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Reconfigured VM instance instance-0000002d to attach disk [datastore2] 12656a79-a836-452c-8f94-c8e142c9ec2f/12656a79-a836-452c-8f94-c8e142c9ec2f.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 729.494212] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7849305d-2910-4ace-8da1-c9c899acd2b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.500745] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 729.500745] env[62204]: value = "task-1199523" [ 729.500745] env[62204]: _type = "Task" [ 729.500745] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.511630] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199523, 'name': Rename_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.514481] env[62204]: DEBUG oslo_vmware.api [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199518, 'name': PowerOnVM_Task, 'duration_secs': 1.136396} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.514712] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 729.514910] env[62204]: INFO nova.compute.manager [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Took 13.03 seconds to spawn the instance on the hypervisor. 
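The task names above (CreateVM_Task, PowerOffVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow the same oslo.vmware pattern visible in these entries: the driver invokes an asynchronous vSphere task and then wait_for_task/_poll_task loops until the task reports "completed successfully". Below is a minimal sketch of that invoke-then-poll pattern, not Nova's driver code; the vCenter host, credentials and the 'vm-12345' managed-object ID are placeholders, not values from this log.
```python
from oslo_vmware import api
from oslo_vmware import vim_util

# Placeholder connection details; the session in this trace was created
# earlier by VMwareAPISession._create_session against a real vCenter.
session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Power on a VM, analogous to PowerOnVM_Task task-1199518 / task-1199525 above.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # illustrative moref
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)  # polls the task (the _poll_task lines) until it completes
```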
[ 729.515127] env[62204]: DEBUG nova.compute.manager [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 729.515831] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc70e17-94cf-4cdc-ac23-675329698e7b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.822661] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 729.869667] env[62204]: DEBUG nova.scheduler.client.report [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 729.901178] env[62204]: DEBUG nova.network.neutron [-] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.922784] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199522, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508485} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.923117] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 258ec37d-c791-4c43-8725-0f4b4bbf9b5b/258ec37d-c791-4c43-8725-0f4b4bbf9b5b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 729.923278] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 729.923521] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6aad8d8c-5aed-4224-b38f-ec6f93f4dd50 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.933034] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 729.933034] env[62204]: value = "task-1199524" [ 729.933034] env[62204]: _type = "Task" [ 729.933034] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.939022] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199524, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.013024] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199523, 'name': Rename_Task, 'duration_secs': 0.20271} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.013024] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 730.013024] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2960b7c7-5d48-47d7-b6ef-9b497e314859 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.019063] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 730.019063] env[62204]: value = "task-1199525" [ 730.019063] env[62204]: _type = "Task" [ 730.019063] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.030719] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199525, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.039161] env[62204]: INFO nova.compute.manager [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Took 32.72 seconds to build instance. 
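The "Acquiring lock ... by ...", "acquired ... :: waited N.NNNs" and ""released" ... :: held N.NNNs" triplets throughout this trace come from oslo.concurrency's lockutils, used both as a decorator (e.g. the compute_resources lock) and as a context manager (the refresh_cache-<uuid> locks). A minimal sketch of both forms follows; the function and prefix names are illustrative, not Nova's actual code.
```python
from oslo_concurrency import lockutils

# Decorator form: emits the "Acquiring lock ... by ...", "acquired ...
# :: waited" and "released ... :: held" messages seen in the log.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def update_usage_example():
    # critical section; illustrative stand-in for the resource tracker calls
    return 'done'

# Context-manager form, as used for the refresh_cache-<uuid> locks above.
with lockutils.lock('refresh_cache-186a2de8-2b9e-4c84-8502-cb0ed3b43123'):
    pass

update_usage_example()
```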
[ 730.284352] env[62204]: DEBUG nova.compute.manager [req-33da489d-2a51-4fe0-a124-f1189e0fe35a req-1ed23612-3bd6-4372-9927-5168b3918b89 service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Received event network-vif-plugged-0bda5859-fbaf-4c89-84b6-e50ec57665a5 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 730.284559] env[62204]: DEBUG oslo_concurrency.lockutils [req-33da489d-2a51-4fe0-a124-f1189e0fe35a req-1ed23612-3bd6-4372-9927-5168b3918b89 service nova] Acquiring lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.284768] env[62204]: DEBUG oslo_concurrency.lockutils [req-33da489d-2a51-4fe0-a124-f1189e0fe35a req-1ed23612-3bd6-4372-9927-5168b3918b89 service nova] Lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.284938] env[62204]: DEBUG oslo_concurrency.lockutils [req-33da489d-2a51-4fe0-a124-f1189e0fe35a req-1ed23612-3bd6-4372-9927-5168b3918b89 service nova] Lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.285205] env[62204]: DEBUG nova.compute.manager [req-33da489d-2a51-4fe0-a124-f1189e0fe35a req-1ed23612-3bd6-4372-9927-5168b3918b89 service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] No waiting events found dispatching network-vif-plugged-0bda5859-fbaf-4c89-84b6-e50ec57665a5 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 730.285377] env[62204]: WARNING nova.compute.manager [req-33da489d-2a51-4fe0-a124-f1189e0fe35a req-1ed23612-3bd6-4372-9927-5168b3918b89 service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Received unexpected event network-vif-plugged-0bda5859-fbaf-4c89-84b6-e50ec57665a5 for instance with vm_state building and task_state spawning. 
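The WARNING above is the usual race between Neutron's network-vif-plugged notification and the spawning instance: the event arrived before the build path registered a waiter for it, so pop_instance_event found nothing to dispatch. The sketch below is a simplified illustration of that register-then-pop bookkeeping using plain threading primitives; it is not Nova's InstanceEvents implementation, and the helper names are assumed for illustration.
```python
import threading

_events = {}
_events_lock = threading.Lock()

def prepare_event(instance_uuid, name):
    # Register a waiter *before* starting the operation that triggers the event.
    with _events_lock:
        event = _events[(instance_uuid, name)] = threading.Event()
    return event

def pop_event(instance_uuid, name):
    # Called by the external-event handler when Neutron notifies the compute host.
    with _events_lock:
        return _events.pop((instance_uuid, name), None)

instance = '186a2de8-2b9e-4c84-8502-cb0ed3b43123'
event_name = 'network-vif-plugged-0bda5859-fbaf-4c89-84b6-e50ec57665a5'

# Here the notification arrives while the instance is still building, so no
# waiter was registered yet and the event is reported as unexpected.
if pop_event(instance, event_name) is None:
    print('Received unexpected event %s' % event_name)
```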
[ 730.366844] env[62204]: DEBUG nova.network.neutron [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Successfully updated port: 0bda5859-fbaf-4c89-84b6-e50ec57665a5 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 730.375366] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.378593] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.735s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.381020] env[62204]: INFO nova.compute.claims [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.403364] env[62204]: INFO nova.compute.manager [-] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Took 1.03 seconds to deallocate network for instance. [ 730.411908] env[62204]: INFO nova.scheduler.client.report [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Deleted allocations for instance bcb11a72-4394-42a2-9a9f-295adc1abcd0 [ 730.438388] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.443205] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199524, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077253} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.443602] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 730.444456] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dcd1221-3e06-4e1f-8f11-390d7c596a76 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.468909] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] 258ec37d-c791-4c43-8725-0f4b4bbf9b5b/258ec37d-c791-4c43-8725-0f4b4bbf9b5b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 730.469799] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b762b8a1-5e0a-4167-8727-2a6f0f76855b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.491312] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 730.491312] env[62204]: value = "task-1199526" [ 730.491312] env[62204]: _type = "Task" [ 730.491312] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.499636] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199526, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.529731] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199525, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.541913] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2131982e-38af-4b6d-8a0f-f4ea07d7a697 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.799s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.869406] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquiring lock "refresh_cache-186a2de8-2b9e-4c84-8502-cb0ed3b43123" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.869566] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquired lock "refresh_cache-186a2de8-2b9e-4c84-8502-cb0ed3b43123" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.869722] env[62204]: DEBUG nova.network.neutron [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 730.914496] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.919339] env[62204]: DEBUG oslo_concurrency.lockutils [None req-895d4696-623d-4f25-b9f9-532ff1805ee4 tempest-ServerShowV254Test-1426703937 tempest-ServerShowV254Test-1426703937-project-member] Lock "bcb11a72-4394-42a2-9a9f-295adc1abcd0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.225s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.938649] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Releasing lock "refresh_cache-bcb11a72-4394-42a2-9a9f-295adc1abcd0" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.938868] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Updated the network info_cache for instance {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 730.939069] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.939232] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 
None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.939529] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.939529] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.939741] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.939787] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.940529] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62204) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 730.940529] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.001766] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199526, 'name': ReconfigVM_Task, 'duration_secs': 0.403014} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.002020] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Reconfigured VM instance instance-0000002e to attach disk [datastore2] 258ec37d-c791-4c43-8725-0f4b4bbf9b5b/258ec37d-c791-4c43-8725-0f4b4bbf9b5b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 731.002667] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0218912-93f7-4f89-bd98-3567a0199a58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.009694] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 731.009694] env[62204]: value = "task-1199527" [ 731.009694] env[62204]: _type = "Task" [ 731.009694] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.019252] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199527, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.029134] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199525, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.044755] env[62204]: DEBUG nova.compute.manager [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 731.235036] env[62204]: DEBUG oslo_concurrency.lockutils [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "b0180c2b-8edf-4d15-8d12-c754b73f6030" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.235361] env[62204]: DEBUG oslo_concurrency.lockutils [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.235605] env[62204]: DEBUG oslo_concurrency.lockutils [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.235803] env[62204]: DEBUG oslo_concurrency.lockutils [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.235993] env[62204]: DEBUG oslo_concurrency.lockutils [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.240518] env[62204]: INFO nova.compute.manager [None 
req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Terminating instance [ 731.244069] env[62204]: DEBUG nova.compute.manager [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 731.244263] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.245866] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2c9516-7e0a-4bda-a061-c519fd4b7d49 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.254663] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 731.254957] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98cf1914-7894-4834-91c2-0657686e9820 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.261370] env[62204]: DEBUG oslo_vmware.api [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 731.261370] env[62204]: value = "task-1199528" [ 731.261370] env[62204]: _type = "Task" [ 731.261370] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.273244] env[62204]: DEBUG oslo_vmware.api [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199528, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.442196] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.447636] env[62204]: DEBUG nova.network.neutron [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.524117] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199527, 'name': Rename_Task, 'duration_secs': 0.388598} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.527287] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 731.527576] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-28e61213-c454-48e7-bdde-9e96917ebc3b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.537446] env[62204]: DEBUG oslo_vmware.api [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199525, 'name': PowerOnVM_Task, 'duration_secs': 1.467684} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.538697] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 731.538912] env[62204]: INFO nova.compute.manager [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Took 9.35 seconds to spawn the instance on the hypervisor. [ 731.539108] env[62204]: DEBUG nova.compute.manager [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 731.539429] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 731.539429] env[62204]: value = "task-1199529" [ 731.539429] env[62204]: _type = "Task" [ 731.539429] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.540109] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805888a7-3e8f-4928-a9ba-7b5733fadf8d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.564175] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199529, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.575108] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.767274] env[62204]: DEBUG nova.network.neutron [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Updating instance_info_cache with network_info: [{"id": "0bda5859-fbaf-4c89-84b6-e50ec57665a5", "address": "fa:16:3e:6e:b7:73", "network": {"id": "1de5fdc9-8d75-4c2b-87f0-0744fea223a9", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-679044530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2955f14089a84d1fabafb933cd7bec04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bda5859-fb", "ovs_interfaceid": "0bda5859-fbaf-4c89-84b6-e50ec57665a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.773811] env[62204]: DEBUG oslo_vmware.api [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199528, 'name': PowerOffVM_Task, 'duration_secs': 0.362278} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.774302] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.774512] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.774783] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b21e501a-c134-47da-8f1c-0ac1f6c377d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.830233] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1efaef5-ea00-45e5-98f6-78a861b0e200 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.837437] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764cd64a-1d48-4bf6-854e-32850df6d469 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.870344] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff16778a-7ba8-4be7-b7a3-18a32b832033 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.878083] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d644dd2a-4dd5-45d2-a9de-cbc58fb1099b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.897223] env[62204]: DEBUG nova.compute.provider_tree [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.056123] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199529, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.073357] env[62204]: INFO nova.compute.manager [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Took 31.57 seconds to build instance. 
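The recurring "Waiting for the task: (returnval){ value = task-… }", "progress is N%" and "completed successfully" sequences are oslo.vmware's task polling (wait_for_task and _poll_task in oslo_vmware/api.py). A hedged sketch of that pattern follows; the host, credentials and keyword argument names are placeholders and assumptions, not this deployment's values:

```python
# Rough sketch of the oslo.vmware task pattern behind the repeated
# "Waiting for the task ... / progress is N% / completed successfully" lines.
# Host and credentials are placeholders; keyword names follow
# VMwareAPISession as recalled and should be treated as assumptions.
from oslo_vmware import api


def connect():
    return api.VMwareAPISession(
        "vcenter.example.org",           # placeholder, not this deployment's vCenter
        "administrator@vsphere.local",   # placeholder credentials
        "secret",
        api_retry_count=10,
        task_poll_interval=0.5,          # interval between the progress polls seen above
    )


def power_on(session, vm_ref):
    # invoke_api() issues the SOAP call (here PowerOnVM_Task) and returns a
    # task reference; wait_for_task() then blocks, logging progress until the
    # task reports success or raises on error.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    session.wait_for_task(task)
```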
[ 732.270054] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Releasing lock "refresh_cache-186a2de8-2b9e-4c84-8502-cb0ed3b43123" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.270441] env[62204]: DEBUG nova.compute.manager [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Instance network_info: |[{"id": "0bda5859-fbaf-4c89-84b6-e50ec57665a5", "address": "fa:16:3e:6e:b7:73", "network": {"id": "1de5fdc9-8d75-4c2b-87f0-0744fea223a9", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-679044530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2955f14089a84d1fabafb933cd7bec04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bda5859-fb", "ovs_interfaceid": "0bda5859-fbaf-4c89-84b6-e50ec57665a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 732.273258] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:b7:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0bda5859-fbaf-4c89-84b6-e50ec57665a5', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 732.279382] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Creating folder: Project (2955f14089a84d1fabafb933cd7bec04). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 732.279734] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f33bfb0c-2cf0-44ae-ab21-6cd549ad61c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.290738] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Created folder: Project (2955f14089a84d1fabafb933cd7bec04) in parent group-v259933. 
[ 732.291189] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Creating folder: Instances. Parent ref: group-v259978. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 732.291282] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e21b2d96-ed11-4b11-887a-efad66a9c5d6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.300558] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Created folder: Instances in parent group-v259978. [ 732.300790] env[62204]: DEBUG oslo.service.loopingcall [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 732.300976] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 732.301182] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-744abf89-ee7d-46ae-9364-435ef55ea499 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.320578] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 732.320578] env[62204]: value = "task-1199533" [ 732.320578] env[62204]: _type = "Task" [ 732.320578] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.328118] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199533, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.344822] env[62204]: DEBUG nova.compute.manager [req-7813132d-1e32-4ff8-96da-263656ad9243 req-2c4abe85-08c5-41db-8872-be111b22b73a service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Received event network-changed-0bda5859-fbaf-4c89-84b6-e50ec57665a5 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 732.345054] env[62204]: DEBUG nova.compute.manager [req-7813132d-1e32-4ff8-96da-263656ad9243 req-2c4abe85-08c5-41db-8872-be111b22b73a service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Refreshing instance network info cache due to event network-changed-0bda5859-fbaf-4c89-84b6-e50ec57665a5. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 732.345258] env[62204]: DEBUG oslo_concurrency.lockutils [req-7813132d-1e32-4ff8-96da-263656ad9243 req-2c4abe85-08c5-41db-8872-be111b22b73a service nova] Acquiring lock "refresh_cache-186a2de8-2b9e-4c84-8502-cb0ed3b43123" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.345400] env[62204]: DEBUG oslo_concurrency.lockutils [req-7813132d-1e32-4ff8-96da-263656ad9243 req-2c4abe85-08c5-41db-8872-be111b22b73a service nova] Acquired lock "refresh_cache-186a2de8-2b9e-4c84-8502-cb0ed3b43123" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.345544] env[62204]: DEBUG nova.network.neutron [req-7813132d-1e32-4ff8-96da-263656ad9243 req-2c4abe85-08c5-41db-8872-be111b22b73a service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Refreshing network info cache for port 0bda5859-fbaf-4c89-84b6-e50ec57665a5 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 732.402831] env[62204]: DEBUG nova.scheduler.client.report [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 732.553107] env[62204]: DEBUG oslo_vmware.api [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199529, 'name': PowerOnVM_Task, 'duration_secs': 0.5995} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.553590] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 732.553825] env[62204]: INFO nova.compute.manager [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Took 7.78 seconds to spawn the instance on the hypervisor. 
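The inventory reported above for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 can be sanity-checked with Placement's usual capacity rule, capacity = (total - reserved) * allocation_ratio. A quick worked check of those figures:

```python
# Worked check of the logged inventory using the standard Placement
# capacity rule: capacity = (total - reserved) * allocation_ratio.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: schedulable capacity = {capacity:g}")
# Prints: VCPU 192, MEMORY_MB 196078, DISK_GB 400 -- which is why claims
# keep succeeding on this node even with many concurrent tempest builds.
```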
[ 732.554062] env[62204]: DEBUG nova.compute.manager [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 732.555197] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18f6eb6-b213-457f-a875-817869f0b314 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.575418] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d5b4de3f-9e3b-45f2-9d41-9fb213b67de2 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "12656a79-a836-452c-8f94-c8e142c9ec2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 135.265s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.830070] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199533, 'name': CreateVM_Task, 'duration_secs': 0.369551} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.830250] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 732.830936] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.831108] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.831418] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 732.831663] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18dd5618-b87a-4334-a24a-4323ec82187c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.835790] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 732.835790] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520357f1-3e16-74a7-bec9-b69a6d1d300e" [ 732.835790] env[62204]: _type = "Task" [ 732.835790] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.843017] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520357f1-3e16-74a7-bec9-b69a6d1d300e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.910825] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.532s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.911371] env[62204]: DEBUG nova.compute.manager [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 732.917377] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.154s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.919449] env[62204]: INFO nova.compute.claims [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.923210] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquiring lock "12656a79-a836-452c-8f94-c8e142c9ec2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.923499] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "12656a79-a836-452c-8f94-c8e142c9ec2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.923757] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquiring lock "12656a79-a836-452c-8f94-c8e142c9ec2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.923995] 
env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "12656a79-a836-452c-8f94-c8e142c9ec2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.924285] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "12656a79-a836-452c-8f94-c8e142c9ec2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.926396] env[62204]: INFO nova.compute.manager [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Terminating instance [ 732.928413] env[62204]: DEBUG nova.compute.manager [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 732.928683] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 732.930023] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4bcb1d-b1c2-4d0d-9ec3-d6a3dddb84f8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.939670] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 732.940208] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c45a522a-4e2d-4b3b-8442-371a21211abb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.946992] env[62204]: DEBUG oslo_vmware.api [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 732.946992] env[62204]: value = "task-1199534" [ 732.946992] env[62204]: _type = "Task" [ 732.946992] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.954771] env[62204]: DEBUG oslo_vmware.api [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.076348] env[62204]: INFO nova.compute.manager [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Took 31.11 seconds to build instance. [ 733.083241] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 733.252743] env[62204]: DEBUG nova.network.neutron [req-7813132d-1e32-4ff8-96da-263656ad9243 req-2c4abe85-08c5-41db-8872-be111b22b73a service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Updated VIF entry in instance network info cache for port 0bda5859-fbaf-4c89-84b6-e50ec57665a5. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 733.253147] env[62204]: DEBUG nova.network.neutron [req-7813132d-1e32-4ff8-96da-263656ad9243 req-2c4abe85-08c5-41db-8872-be111b22b73a service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Updating instance_info_cache with network_info: [{"id": "0bda5859-fbaf-4c89-84b6-e50ec57665a5", "address": "fa:16:3e:6e:b7:73", "network": {"id": "1de5fdc9-8d75-4c2b-87f0-0744fea223a9", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-679044530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2955f14089a84d1fabafb933cd7bec04", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bda5859-fb", "ovs_interfaceid": "0bda5859-fbaf-4c89-84b6-e50ec57665a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.346810] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520357f1-3e16-74a7-bec9-b69a6d1d300e, 'name': SearchDatastore_Task, 'duration_secs': 0.01178} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.347197] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.347430] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 733.347682] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.347826] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.347999] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 733.348281] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c73698f-40fd-43dc-b57d-aa735921a749 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.356532] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 733.356707] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 733.357428] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a764c2c-8772-4c35-9d90-73c12bb4543c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.365434] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 733.365434] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202955e-d550-7c91-041e-97a5e73f0258" [ 733.365434] env[62204]: _type = "Task" [ 733.365434] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.372703] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202955e-d550-7c91-041e-97a5e73f0258, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.427204] env[62204]: DEBUG nova.compute.utils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 733.430133] env[62204]: DEBUG nova.compute.manager [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 733.430133] env[62204]: DEBUG nova.network.neutron [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 733.457158] env[62204]: DEBUG oslo_vmware.api [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199534, 'name': PowerOffVM_Task, 'duration_secs': 0.261071} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.457414] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 733.457580] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 733.458307] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a02b453-6d54-496e-a0b6-61b8015ccc92 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.479591] env[62204]: DEBUG nova.policy [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '478b22c814424575af79a8af808398a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81dc15a8604e4900845b79c75cc5ef16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 733.579298] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2140b8ee-af57-4117-8d49-7f7c1b5d56c3 tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.550s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.616187] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.688734] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquiring lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.688988] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.689225] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquiring lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.689406] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.689575] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.691910] env[62204]: INFO nova.compute.manager [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Terminating instance [ 733.693845] env[62204]: DEBUG nova.compute.manager [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 733.694046] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.694862] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effce2ea-e29b-4982-b6fd-64fa0fc2d4bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.702611] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 733.702862] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53efbe4f-6948-4e9d-ac44-40be40240cf1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.709497] env[62204]: DEBUG oslo_vmware.api [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 733.709497] env[62204]: value = "task-1199536" [ 733.709497] env[62204]: _type = "Task" [ 733.709497] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.718258] env[62204]: DEBUG oslo_vmware.api [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.757722] env[62204]: DEBUG oslo_concurrency.lockutils [req-7813132d-1e32-4ff8-96da-263656ad9243 req-2c4abe85-08c5-41db-8872-be111b22b73a service nova] Releasing lock "refresh_cache-186a2de8-2b9e-4c84-8502-cb0ed3b43123" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.882391] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202955e-d550-7c91-041e-97a5e73f0258, 'name': SearchDatastore_Task, 'duration_secs': 0.012094} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.882391] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6158e9d6-edbb-4ce2-bcb3-a03b5a445491 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.885926] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 733.885926] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52754579-be76-9f1c-c006-847658cd777f" [ 733.885926] env[62204]: _type = "Task" [ 733.885926] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.894662] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52754579-be76-9f1c-c006-847658cd777f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.937790] env[62204]: DEBUG nova.compute.manager [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 733.952095] env[62204]: DEBUG nova.network.neutron [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Successfully created port: c81069ae-b3b9-4b0d-902f-ed9a2e24542f {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.084433] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 734.090063] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 734.090469] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 734.090807] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Deleting the datastore file [datastore2] 12656a79-a836-452c-8f94-c8e142c9ec2f {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.091371] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a22ae49-9e74-4b0d-8472-6f1921b06a91 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.099272] env[62204]: DEBUG oslo_vmware.api [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for the task: (returnval){ [ 734.099272] env[62204]: value = "task-1199537" [ 734.099272] env[62204]: _type = "Task" [ 734.099272] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.099272] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 734.099272] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 734.099272] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Deleting the datastore file [datastore2] b0180c2b-8edf-4d15-8d12-c754b73f6030 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.103008] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7092bdf6-00d8-45ea-83e8-658f1c98a49c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.113934] env[62204]: DEBUG oslo_vmware.api [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199537, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.115355] env[62204]: DEBUG oslo_vmware.api [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 734.115355] env[62204]: value = "task-1199538" [ 734.115355] env[62204]: _type = "Task" [ 734.115355] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.128817] env[62204]: DEBUG oslo_vmware.api [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199538, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.222722] env[62204]: DEBUG oslo_vmware.api [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199536, 'name': PowerOffVM_Task, 'duration_secs': 0.209299} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.222783] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 734.222977] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 734.223682] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31aa47c8-71f9-4404-be55-6e9914e279cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.287081] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 734.287081] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 734.287081] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Deleting the datastore file [datastore2] 258ec37d-c791-4c43-8725-0f4b4bbf9b5b {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.287081] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b00db8f4-3e04-46f5-bc70-e9eca95ba2c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.296250] env[62204]: DEBUG oslo_vmware.api [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for the task: (returnval){ [ 734.296250] env[62204]: value = "task-1199540" [ 734.296250] env[62204]: _type = "Task" [ 734.296250] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.306431] env[62204]: DEBUG oslo_vmware.api [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199540, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.358813] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ef06d2-24df-49b3-ab80-3283624a2eea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.366068] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32a22c6-a79e-49d9-94df-4035ef0b2066 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.400642] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc41f89-2034-4f53-bdc7-086e71234c9c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.410360] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52754579-be76-9f1c-c006-847658cd777f, 'name': SearchDatastore_Task, 'duration_secs': 0.010939} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.412039] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.412270] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 186a2de8-2b9e-4c84-8502-cb0ed3b43123/186a2de8-2b9e-4c84-8502-cb0ed3b43123.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 734.412568] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c40fea7f-1f1d-4b47-b669-1c50a0ae79c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.415320] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acfb6e2-eddb-43a4-bb9b-b33a06daeccc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.431944] env[62204]: DEBUG nova.compute.provider_tree [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.435930] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 734.435930] env[62204]: 
value = "task-1199541" [ 734.435930] env[62204]: _type = "Task" [ 734.435930] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.445245] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199541, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.607032] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.610299] env[62204]: DEBUG oslo_vmware.api [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Task: {'id': task-1199537, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168006} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.610592] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 734.610816] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 734.610999] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 734.611251] env[62204]: INFO nova.compute.manager [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Took 1.68 seconds to destroy the instance on the hypervisor. [ 734.611518] env[62204]: DEBUG oslo.service.loopingcall [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.611718] env[62204]: DEBUG nova.compute.manager [-] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 734.611814] env[62204]: DEBUG nova.network.neutron [-] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 734.628703] env[62204]: DEBUG oslo_vmware.api [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18816} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.629049] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 734.629291] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 734.629495] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 734.629674] env[62204]: INFO nova.compute.manager [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Took 3.39 seconds to destroy the instance on the hypervisor. [ 734.629916] env[62204]: DEBUG oslo.service.loopingcall [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.630130] env[62204]: DEBUG nova.compute.manager [-] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 734.630222] env[62204]: DEBUG nova.network.neutron [-] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 734.810900] env[62204]: DEBUG oslo_vmware.api [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Task: {'id': task-1199540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137145} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.811198] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 734.811466] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 734.811705] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 734.811907] env[62204]: INFO nova.compute.manager [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Took 1.12 seconds to destroy the instance on the hypervisor. [ 734.812197] env[62204]: DEBUG oslo.service.loopingcall [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.812402] env[62204]: DEBUG nova.compute.manager [-] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 734.812492] env[62204]: DEBUG nova.network.neutron [-] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 734.936506] env[62204]: DEBUG nova.scheduler.client.report [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.948021] env[62204]: DEBUG nova.compute.manager [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 734.960153] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454315} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.960430] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 186a2de8-2b9e-4c84-8502-cb0ed3b43123/186a2de8-2b9e-4c84-8502-cb0ed3b43123.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 734.960648] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 734.960901] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dca5ebfd-708b-48ae-a2d7-54b1903e412d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.969441] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 734.969441] env[62204]: value = "task-1199542" [ 734.969441] env[62204]: _type = "Task" [ 734.969441] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.979236] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199542, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 734.992180] env[62204]: DEBUG nova.virt.hardware [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 734.993227] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec9bf6f-f020-4482-af0a-eb6d85489e76 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.004701] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2001b6b-f122-45f9-8f5e-4d0552a18d16 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.012563] env[62204]: DEBUG nova.compute.manager [req-f0ca829e-2ac9-440a-b7db-17fc2d0a9b30 req-1a0e3b6d-29de-42f8-8eb7-c9577611d330 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Received event network-vif-deleted-f0ae2eb6-f038-4347-b9bf-573e69ee42d2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 735.012752] env[62204]: INFO nova.compute.manager [req-f0ca829e-2ac9-440a-b7db-17fc2d0a9b30 req-1a0e3b6d-29de-42f8-8eb7-c9577611d330 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Neutron deleted interface f0ae2eb6-f038-4347-b9bf-573e69ee42d2; detaching it from the instance and deleting it from the info cache [ 735.012928] env[62204]: DEBUG nova.network.neutron [req-f0ca829e-2ac9-440a-b7db-17fc2d0a9b30 req-1a0e3b6d-29de-42f8-8eb7-c9577611d330 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.031720] env[62204]: DEBUG nova.compute.manager [req-44a57f05-201c-42fb-a1a7-486e94dc2d5d req-85db2185-849d-4532-8de6-d9a861598cca service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received event network-vif-deleted-929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 735.031941] env[62204]: INFO nova.compute.manager [req-44a57f05-201c-42fb-a1a7-486e94dc2d5d req-85db2185-849d-4532-8de6-d9a861598cca service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Neutron deleted interface 929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd; detaching it from the instance and deleting it from the info cache [ 735.032223] env[62204]: DEBUG nova.network.neutron [req-44a57f05-201c-42fb-a1a7-486e94dc2d5d req-85db2185-849d-4532-8de6-d9a861598cca service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Updating instance_info_cache with network_info: [{"id": 
"962bb0fb-5bee-480f-ae79-b9ba36189ba1", "address": "fa:16:3e:06:6c:48", "network": {"id": "956c5f5f-4fa4-45e7-86c3-29574bedc6bb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-823685576", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.184", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bab6a6c3-1c5c-4776-b21b-dec21196d702", "external-id": "nsx-vlan-transportzone-634", "segmentation_id": 634, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap962bb0fb-5b", "ovs_interfaceid": "962bb0fb-5bee-480f-ae79-b9ba36189ba1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4688967a-f972-4674-959a-9c23ad7c85d8", "address": "fa:16:3e:92:1c:fc", "network": {"id": "fad8b6a5-be09-4c56-8c46-ab6bf89595c0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1722406786", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "111a2767-1b06-4fe5-852b-40c9b5a699fd", "external-id": "nsx-vlan-transportzone-975", "segmentation_id": 975, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4688967a-f9", "ovs_interfaceid": "4688967a-f972-4674-959a-9c23ad7c85d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.444553] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.527s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.444925] env[62204]: DEBUG nova.compute.manager [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 735.447646] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.263s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.449174] env[62204]: INFO nova.compute.claims [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.480631] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199542, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127145} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.480979] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 735.481862] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f20900d-0a1a-4dd2-bb1c-b86d29d44df7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.485866] env[62204]: DEBUG nova.network.neutron [-] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.504852] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] 186a2de8-2b9e-4c84-8502-cb0ed3b43123/186a2de8-2b9e-4c84-8502-cb0ed3b43123.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 735.506549] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5730a060-e373-468a-acdf-20cfe215cac8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.520911] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-50ce018e-ca44-4b93-9004-066b25789679 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.529058] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 735.529058] env[62204]: value = "task-1199543" [ 735.529058] env[62204]: _type = "Task" [ 735.529058] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.536577] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3de1360-342e-49c6-a3eb-0ef75ec3a2b1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.548902] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2f9b366-a19b-4603-816d-2cee9b9a6211 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.555070] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199543, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.566472] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8cedc3-679a-4ad9-b004-8ed340e9ce9e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.577929] env[62204]: DEBUG nova.compute.manager [req-f0ca829e-2ac9-440a-b7db-17fc2d0a9b30 req-1a0e3b6d-29de-42f8-8eb7-c9577611d330 service nova] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Detach interface failed, port_id=f0ae2eb6-f038-4347-b9bf-573e69ee42d2, reason: Instance 12656a79-a836-452c-8f94-c8e142c9ec2f could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 735.590808] env[62204]: DEBUG nova.compute.manager [req-44a57f05-201c-42fb-a1a7-486e94dc2d5d req-85db2185-849d-4532-8de6-d9a861598cca service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Detach interface failed, port_id=929b6a1e-1d30-4fd3-b2d2-39d15d4c82bd, reason: Instance b0180c2b-8edf-4d15-8d12-c754b73f6030 could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 735.708135] env[62204]: DEBUG nova.network.neutron [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Successfully updated port: c81069ae-b3b9-4b0d-902f-ed9a2e24542f {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 735.731038] env[62204]: DEBUG nova.network.neutron [-] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.930221] env[62204]: DEBUG nova.network.neutron [-] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.955579] env[62204]: DEBUG nova.compute.utils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 735.957251] env[62204]: DEBUG nova.compute.manager [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 735.957445] env[62204]: DEBUG nova.network.neutron [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 736.006931] env[62204]: INFO nova.compute.manager [-] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Took 1.39 seconds to deallocate network for instance. [ 736.019491] env[62204]: DEBUG nova.policy [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6be351c4a04947c5b7d396cdc3a28164', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53d52da8765c4d69b803b4f18d0d9bc2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 736.040080] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199543, 'name': ReconfigVM_Task, 'duration_secs': 0.452891} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.040903] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Reconfigured VM instance instance-0000002f to attach disk [datastore2] 186a2de8-2b9e-4c84-8502-cb0ed3b43123/186a2de8-2b9e-4c84-8502-cb0ed3b43123.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.041520] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8448b66-c565-4258-9409-b77b6042c255 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.048030] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 736.048030] env[62204]: value = "task-1199544" [ 736.048030] env[62204]: _type = "Task" [ 736.048030] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.060203] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199544, 'name': Rename_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.210616] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.210749] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.210910] env[62204]: DEBUG nova.network.neutron [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 736.232981] env[62204]: INFO nova.compute.manager [-] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Took 1.42 seconds to deallocate network for instance. [ 736.435487] env[62204]: INFO nova.compute.manager [-] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Took 1.81 seconds to deallocate network for instance. [ 736.462726] env[62204]: DEBUG nova.compute.manager [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 736.464153] env[62204]: DEBUG nova.network.neutron [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Successfully created port: 70a9a29f-b59c-48d3-8c34-c3bbff8169e0 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.513589] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.563843] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199544, 'name': Rename_Task, 'duration_secs': 0.13844} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.564950] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 736.565640] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f626c8c0-ac42-4786-bab9-49fc214327a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.575300] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 736.575300] env[62204]: value = "task-1199545" [ 736.575300] env[62204]: _type = "Task" [ 736.575300] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.586461] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199545, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.741743] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.750250] env[62204]: DEBUG nova.network.neutron [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 736.867951] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a68dce-bcd3-42d1-a273-6615b9a84ac3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.876550] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e83189-832b-4e5f-b2fe-c79329a71c80 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.909874] env[62204]: DEBUG nova.network.neutron [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.911218] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47119e9-a033-443d-8001-98390525f843 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.919528] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bdf3b9a-b497-478f-8ef1-96e11dd66a5a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.934267] env[62204]: DEBUG nova.compute.provider_tree [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.944587] env[62204]: DEBUG oslo_concurrency.lockutils [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.968416] 
env[62204]: INFO nova.virt.block_device [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Booting with volume e1a1d343-79bf-455c-8446-09fa8e9f2035 at /dev/sda [ 737.009570] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e501e75-0396-4139-8a94-65f51a2edcc6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.018882] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e46891f-babf-47a7-8dd9-875b0f42022e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.032440] env[62204]: DEBUG nova.compute.manager [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-vif-plugged-c81069ae-b3b9-4b0d-902f-ed9a2e24542f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 737.032681] env[62204]: DEBUG oslo_concurrency.lockutils [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.032851] env[62204]: DEBUG oslo_concurrency.lockutils [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.033096] env[62204]: DEBUG oslo_concurrency.lockutils [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.033273] env[62204]: DEBUG nova.compute.manager [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] No waiting events found dispatching network-vif-plugged-c81069ae-b3b9-4b0d-902f-ed9a2e24542f {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 737.033439] env[62204]: WARNING nova.compute.manager [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received unexpected event network-vif-plugged-c81069ae-b3b9-4b0d-902f-ed9a2e24542f for instance with vm_state building and task_state spawning. 
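The task-wait entries in this stretch of the log (e.g. Rename_Task task-1199544 and PowerOnVM_Task task-1199545, logged through wait_for_task and _poll_task in oslo_vmware/api.py) follow the usual oslo.vmware pattern: invoke a vCenter *_Task method through the API session, then block until the returned task reaches SUCCESS. The following is only a minimal sketch of that pattern under assumed connection parameters and an illustrative power-on call; it is not Nova's actual vmwareapi driver code.

    from oslo_vmware import api

    # Placeholder connection settings; in Nova these come from the [vmware]
    # section of nova.conf. The positional order (host, username, password,
    # api_retry_count, task_poll_interval) is recalled from memory and should
    # be checked against the installed oslo.vmware release.
    session = api.VMwareAPISession('vc.example.test',
                                   'administrator@vsphere.local',
                                   'secret', 10, 0.5)

    def power_on(vm_ref):
        # Invoking a *_Task SOAP method returns a Task moref such as
        # "task-1199545"; wait_for_task() then polls it (producing the
        # "progress is N%" debug lines seen above) and returns the task info
        # once the task reports SUCCESS, or raises on failure.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task_ref)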
[ 737.033592] env[62204]: DEBUG nova.compute.manager [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-changed-c81069ae-b3b9-4b0d-902f-ed9a2e24542f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 737.033737] env[62204]: DEBUG nova.compute.manager [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Refreshing instance network info cache due to event network-changed-c81069ae-b3b9-4b0d-902f-ed9a2e24542f. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 737.033896] env[62204]: DEBUG oslo_concurrency.lockutils [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] Acquiring lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.041753] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d70525ad-b9fd-4b3a-9980-3fbf61f56915 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.049564] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7829faf7-3575-4673-91b9-e87467dcfc23 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.067550] env[62204]: DEBUG nova.compute.manager [req-0f0356e2-0bcb-44e2-8462-b0da6806cf1f req-cdc3947f-7d18-4ccb-91fb-595cae3663c2 service nova] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Received event network-vif-deleted-70b30639-0d69-468e-b14e-204a65bc34d5 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 737.067750] env[62204]: DEBUG nova.compute.manager [req-0f0356e2-0bcb-44e2-8462-b0da6806cf1f req-cdc3947f-7d18-4ccb-91fb-595cae3663c2 service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received event network-vif-deleted-4688967a-f972-4674-959a-9c23ad7c85d8 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 737.067920] env[62204]: DEBUG nova.compute.manager [req-0f0356e2-0bcb-44e2-8462-b0da6806cf1f req-cdc3947f-7d18-4ccb-91fb-595cae3663c2 service nova] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Received event network-vif-deleted-962bb0fb-5bee-480f-ae79-b9ba36189ba1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 737.074017] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd4a662-99cf-4fd2-acd0-5cfffa52e1a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.087774] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48372b04-decd-4698-9934-a6ab095d486f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.090105] env[62204]: DEBUG oslo_vmware.api [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199545, 'name': PowerOnVM_Task, 'duration_secs': 0.440673} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.090597] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 737.090797] env[62204]: INFO nova.compute.manager [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Took 7.73 seconds to spawn the instance on the hypervisor. [ 737.090966] env[62204]: DEBUG nova.compute.manager [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 737.091965] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de649408-b0a9-43d4-9e1d-b083a36eb9c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.102806] env[62204]: DEBUG nova.virt.block_device [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Updating existing volume attachment record: 4921b09b-4c35-4547-87b6-473ec051d364 {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 737.417150] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.417616] env[62204]: DEBUG nova.compute.manager [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Instance network_info: |[{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 737.418051] env[62204]: DEBUG oslo_concurrency.lockutils [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] Acquired lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.418382] env[62204]: DEBUG nova.network.neutron [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Refreshing network info cache for port c81069ae-b3b9-4b0d-902f-ed9a2e24542f {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 737.420035] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:54:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c81069ae-b3b9-4b0d-902f-ed9a2e24542f', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.431827] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Creating folder: Project (81dc15a8604e4900845b79c75cc5ef16). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.435736] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcd38ac8-824c-4ca7-b0b7-2b0df49c0415 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.439357] env[62204]: DEBUG nova.scheduler.client.report [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 737.452460] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Created folder: Project (81dc15a8604e4900845b79c75cc5ef16) in parent group-v259933. [ 737.452740] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Creating folder: Instances. Parent ref: group-v259981. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.453142] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa24fa4d-8015-4b63-adc5-76d26dc5a4d1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.462391] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Created folder: Instances in parent group-v259981. [ 737.462709] env[62204]: DEBUG oslo.service.loopingcall [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.462967] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 737.463247] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e60ea06-90f0-4379-b90d-f2a3567f634d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.494494] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.494494] env[62204]: value = "task-1199548" [ 737.494494] env[62204]: _type = "Task" [ 737.494494] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.502877] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199548, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.616120] env[62204]: INFO nova.compute.manager [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Took 33.59 seconds to build instance. [ 737.683888] env[62204]: DEBUG nova.network.neutron [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updated VIF entry in instance network info cache for port c81069ae-b3b9-4b0d-902f-ed9a2e24542f. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 737.684317] env[62204]: DEBUG nova.network.neutron [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.947529] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.948088] env[62204]: DEBUG nova.compute.manager [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 737.951754] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.455s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.955835] env[62204]: INFO nova.compute.claims [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.995722] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquiring lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.009742] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199548, 'name': CreateVM_Task, 'duration_secs': 0.322555} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.009921] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 738.010640] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.010760] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.011122] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 738.011320] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6edb3873-2bb3-42da-a536-c92b9045747f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.016191] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 738.016191] env[62204]: value = 
"session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f552e1-6ae5-0e5b-b354-0438a8a5de57" [ 738.016191] env[62204]: _type = "Task" [ 738.016191] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.024024] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f552e1-6ae5-0e5b-b354-0438a8a5de57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.118880] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d46b61d3-1523-4a8d-8948-202daee35b32 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 132.514s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.120607] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.125s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.122024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquiring lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.122024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.122024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.123829] env[62204]: INFO nova.compute.manager [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Terminating instance [ 738.129021] env[62204]: DEBUG nova.compute.manager [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 
186a2de8-2b9e-4c84-8502-cb0ed3b43123] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 738.129021] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.129021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da70028-eeb6-4c64-95ee-e521752b6bfe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.140549] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.140549] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65b79223-445e-4d1a-9063-6dd07e3efb16 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.147029] env[62204]: DEBUG oslo_vmware.api [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 738.147029] env[62204]: value = "task-1199549" [ 738.147029] env[62204]: _type = "Task" [ 738.147029] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.160292] env[62204]: DEBUG oslo_vmware.api [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199549, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.186794] env[62204]: DEBUG oslo_concurrency.lockutils [req-d140d79e-fa5b-4da0-9321-f9417afee52a req-0828dca9-d2ec-4cc8-af71-9219d6e8c5d8 service nova] Releasing lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.397663] env[62204]: DEBUG nova.network.neutron [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Successfully updated port: 70a9a29f-b59c-48d3-8c34-c3bbff8169e0 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.459884] env[62204]: DEBUG nova.compute.utils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 738.461221] env[62204]: DEBUG nova.compute.manager [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 738.461394] env[62204]: DEBUG nova.network.neutron [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 738.526809] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f552e1-6ae5-0e5b-b354-0438a8a5de57, 'name': SearchDatastore_Task, 'duration_secs': 0.009808} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.528157] env[62204]: DEBUG nova.policy [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53cab7fd384749deb5c7f6e3faa03b1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '286b300e98e244eb8693bb0f3174c121', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 738.529545] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.529768] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 738.529989] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.530149] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.530327] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 738.530575] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62893fd0-5d2d-494b-b759-3f91aa6c018f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.539112] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 738.539112] env[62204]: DEBUG 
nova.virt.vmwareapi.vmops [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 738.539714] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a527131-aa04-4f88-9ca8-9021e719a43f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.544324] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 738.544324] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5286c0a9-82fa-8ecd-1fc8-eea72e6e59de" [ 738.544324] env[62204]: _type = "Task" [ 738.544324] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.551557] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5286c0a9-82fa-8ecd-1fc8-eea72e6e59de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.624135] env[62204]: DEBUG nova.compute.manager [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 738.656978] env[62204]: DEBUG oslo_vmware.api [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199549, 'name': PowerOffVM_Task, 'duration_secs': 0.195572} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.657271] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.657447] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.657784] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1d591e7-5c45-4c0c-a2b1-a4dc50320225 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.725792] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.726026] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.726355] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Deleting the datastore file [datastore2] 186a2de8-2b9e-4c84-8502-cb0ed3b43123 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.726611] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3dfc51c2-c56f-4abd-a54d-f4dbceff74f4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.733683] env[62204]: DEBUG oslo_vmware.api [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for the task: (returnval){ [ 738.733683] env[62204]: value = "task-1199551" [ 738.733683] env[62204]: _type = "Task" [ 738.733683] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.741673] env[62204]: DEBUG oslo_vmware.api [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199551, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.903357] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Acquiring lock "refresh_cache-69604167-6a61-4723-bf7d-7ba168837839" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.903515] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Acquired lock "refresh_cache-69604167-6a61-4723-bf7d-7ba168837839" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.903691] env[62204]: DEBUG nova.network.neutron [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 738.914717] env[62204]: DEBUG nova.network.neutron [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Successfully created port: eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 738.966420] env[62204]: DEBUG nova.compute.manager [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 739.054613] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5286c0a9-82fa-8ecd-1fc8-eea72e6e59de, 'name': SearchDatastore_Task, 'duration_secs': 0.008101} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.058075] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-233443bd-d6dc-41ba-bf3f-035559464637 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.064999] env[62204]: DEBUG nova.compute.manager [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Received event network-vif-plugged-70a9a29f-b59c-48d3-8c34-c3bbff8169e0 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.065951] env[62204]: DEBUG oslo_concurrency.lockutils [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] Acquiring lock "69604167-6a61-4723-bf7d-7ba168837839-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.066472] env[62204]: DEBUG oslo_concurrency.lockutils [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] Lock "69604167-6a61-4723-bf7d-7ba168837839-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.066778] env[62204]: DEBUG oslo_concurrency.lockutils [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] Lock "69604167-6a61-4723-bf7d-7ba168837839-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.067103] env[62204]: DEBUG nova.compute.manager [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] No waiting events found dispatching network-vif-plugged-70a9a29f-b59c-48d3-8c34-c3bbff8169e0 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 739.070018] env[62204]: WARNING nova.compute.manager [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Received unexpected event network-vif-plugged-70a9a29f-b59c-48d3-8c34-c3bbff8169e0 for instance with vm_state building and task_state spawning. [ 739.070018] env[62204]: DEBUG nova.compute.manager [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Received event network-changed-70a9a29f-b59c-48d3-8c34-c3bbff8169e0 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.070018] env[62204]: DEBUG nova.compute.manager [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Refreshing instance network info cache due to event network-changed-70a9a29f-b59c-48d3-8c34-c3bbff8169e0. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 739.070018] env[62204]: DEBUG oslo_concurrency.lockutils [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] Acquiring lock "refresh_cache-69604167-6a61-4723-bf7d-7ba168837839" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 739.070018] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 739.070018] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520c2f20-8bf4-f9af-74a0-2cdf991b8760" [ 739.070018] env[62204]: _type = "Task" [ 739.070018] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.084371] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520c2f20-8bf4-f9af-74a0-2cdf991b8760, 'name': SearchDatastore_Task, 'duration_secs': 0.009292} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.084699] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.085089] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] a71fd192-f3b6-4f0f-900d-887d15f44d7a/a71fd192-f3b6-4f0f-900d-887d15f44d7a.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 739.085382] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9535fdb9-8825-4eb4-bee2-dfd00460c13c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.094334] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 739.094334] env[62204]: value = "task-1199552" [ 739.094334] env[62204]: _type = "Task" [ 739.094334] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.103224] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199552, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.142705] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 739.206660] env[62204]: DEBUG nova.compute.manager [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 739.207751] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 739.207751] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 739.207978] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 739.208099] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 739.208257] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 739.208391] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 739.208593] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 
tempest-ServerActionsV293TestJSON-1601953463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 739.208751] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 739.208987] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 739.213019] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 739.213019] env[62204]: DEBUG nova.virt.hardware [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 739.213019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce8e472-6bc6-49ee-ac0b-125efa58a296 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.223178] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb84bafc-1c69-46fc-a908-17e2b91aa806 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.252634] env[62204]: DEBUG oslo_vmware.api [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Task: {'id': task-1199551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136353} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.252634] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.252634] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.254204] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.254316] env[62204]: INFO nova.compute.manager [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Took 1.13 seconds to destroy the instance on the hypervisor. [ 739.254913] env[62204]: DEBUG oslo.service.loopingcall [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 739.254913] env[62204]: DEBUG nova.compute.manager [-] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 739.254913] env[62204]: DEBUG nova.network.neutron [-] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 739.393817] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370cf47b-6067-4c30-9fbb-e14d1378971d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.401976] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b055c913-6750-4822-ba88-15bb84682adc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.432593] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a3b88c-7dff-4d50-82a0-022ff093eeea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.440554] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a421522-7d8a-4725-a6fc-bd243836fe51 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.456278] env[62204]: DEBUG nova.compute.provider_tree [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.458292] env[62204]: DEBUG nova.network.neutron [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.604384] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199552, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457986} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.604656] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] a71fd192-f3b6-4f0f-900d-887d15f44d7a/a71fd192-f3b6-4f0f-900d-887d15f44d7a.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 739.604879] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 739.605180] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8868c566-c53a-4bbf-8507-36579cfce93a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.611492] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 739.611492] env[62204]: value = "task-1199553" [ 739.611492] env[62204]: _type = "Task" [ 739.611492] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.622024] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199553, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.675229] env[62204]: DEBUG nova.network.neutron [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Updating instance_info_cache with network_info: [{"id": "70a9a29f-b59c-48d3-8c34-c3bbff8169e0", "address": "fa:16:3e:f9:be:71", "network": {"id": "75ae1e87-ea66-41e5-a2eb-4b54cfdc6a2b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1366063071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d52da8765c4d69b803b4f18d0d9bc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70a9a29f-b5", "ovs_interfaceid": "70a9a29f-b59c-48d3-8c34-c3bbff8169e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.715149] env[62204]: DEBUG nova.compute.manager [req-6f65c893-2204-438e-9135-44e92171d8dc req-317f0953-84c1-4287-a1af-1539b7bfb3bd service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Received event network-vif-deleted-0bda5859-fbaf-4c89-84b6-e50ec57665a5 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.715149] env[62204]: INFO nova.compute.manager [req-6f65c893-2204-438e-9135-44e92171d8dc req-317f0953-84c1-4287-a1af-1539b7bfb3bd service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Neutron deleted interface 0bda5859-fbaf-4c89-84b6-e50ec57665a5; detaching it from the instance and deleting it from the info cache [ 739.715149] env[62204]: DEBUG nova.network.neutron [req-6f65c893-2204-438e-9135-44e92171d8dc req-317f0953-84c1-4287-a1af-1539b7bfb3bd service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.962145] env[62204]: DEBUG nova.scheduler.client.report [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 739.979918] env[62204]: DEBUG nova.compute.manager [None 
req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 740.009630] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 740.009874] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 740.010042] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.010229] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 740.010372] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.010514] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 740.010752] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 740.010925] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 
tempest-SecurityGroupsTestJSON-634231456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 740.011103] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 740.011267] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 740.011436] env[62204]: DEBUG nova.virt.hardware [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 740.012566] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35af44ce-aa9f-4be2-8f13-d6acb83612b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.020700] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba0f0d50-bbad-47ac-8dcb-03a8eec527d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.120320] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199553, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067086} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.120604] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 740.121370] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bb0b55-0147-43da-a083-b0f21a64ec5f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.143265] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] a71fd192-f3b6-4f0f-900d-887d15f44d7a/a71fd192-f3b6-4f0f-900d-887d15f44d7a.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 740.143566] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce29fe39-fb2f-4dd0-81c4-f776449ae8c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.157353] env[62204]: DEBUG nova.network.neutron [-] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.164115] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 740.164115] env[62204]: value = "task-1199554" [ 740.164115] env[62204]: _type = "Task" [ 740.164115] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.173980] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199554, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.178520] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Releasing lock "refresh_cache-69604167-6a61-4723-bf7d-7ba168837839" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 740.178828] env[62204]: DEBUG nova.compute.manager [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Instance network_info: |[{"id": "70a9a29f-b59c-48d3-8c34-c3bbff8169e0", "address": "fa:16:3e:f9:be:71", "network": {"id": "75ae1e87-ea66-41e5-a2eb-4b54cfdc6a2b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1366063071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d52da8765c4d69b803b4f18d0d9bc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70a9a29f-b5", "ovs_interfaceid": "70a9a29f-b59c-48d3-8c34-c3bbff8169e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 740.179832] env[62204]: DEBUG oslo_concurrency.lockutils [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] Acquired lock "refresh_cache-69604167-6a61-4723-bf7d-7ba168837839" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.179832] env[62204]: DEBUG nova.network.neutron [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Refreshing network info cache for port 70a9a29f-b59c-48d3-8c34-c3bbff8169e0 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 740.181263] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:be:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d0c6fd7-3cc9-4818-9475-8f15900394cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70a9a29f-b59c-48d3-8c34-c3bbff8169e0', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.189029] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 
tempest-ServerActionsV293TestJSON-1601953463-project-member] Creating folder: Project (53d52da8765c4d69b803b4f18d0d9bc2). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.189369] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82032887-6160-42f3-afcf-002572dd196a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.201967] env[62204]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 740.202137] env[62204]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62204) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 740.202442] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Folder already exists: Project (53d52da8765c4d69b803b4f18d0d9bc2). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 740.202624] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Creating folder: Instances. Parent ref: group-v259946. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.202841] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5407b8b0-e3ea-46c1-882a-ace219347cee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.211613] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Created folder: Instances in parent group-v259946. [ 740.211838] env[62204]: DEBUG oslo.service.loopingcall [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 740.212029] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 740.212248] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08079a5d-fca2-49fb-959c-5ede02284c41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.227268] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dccdd3f-a57e-4d8a-8109-c7f0984fe540 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.233694] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.233694] env[62204]: value = "task-1199557" [ 740.233694] env[62204]: _type = "Task" [ 740.233694] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.240111] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce810ff-5e00-417a-9bf8-13e5ca2a030b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.254924] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199557, 'name': CreateVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.263943] env[62204]: DEBUG nova.compute.manager [req-6f65c893-2204-438e-9135-44e92171d8dc req-317f0953-84c1-4287-a1af-1539b7bfb3bd service nova] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Detach interface failed, port_id=0bda5859-fbaf-4c89-84b6-e50ec57665a5, reason: Instance 186a2de8-2b9e-4c84-8502-cb0ed3b43123 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 740.467521] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.468144] env[62204]: DEBUG nova.compute.manager [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 740.472439] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.560s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.474161] env[62204]: INFO nova.compute.claims [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.659811] env[62204]: INFO nova.compute.manager [-] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Took 1.40 seconds to deallocate network for instance. [ 740.673681] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199554, 'name': ReconfigVM_Task, 'duration_secs': 0.319043} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.674194] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfigured VM instance instance-00000030 to attach disk [datastore2] a71fd192-f3b6-4f0f-900d-887d15f44d7a/a71fd192-f3b6-4f0f-900d-887d15f44d7a.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 740.674546] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ffaa463-128f-4b5d-b6e6-df12761b4e14 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.682424] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 740.682424] env[62204]: value = "task-1199558" [ 740.682424] env[62204]: _type = "Task" [ 740.682424] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.692887] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199558, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.745135] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199557, 'name': CreateVM_Task, 'duration_secs': 0.356214} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.745305] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 740.746027] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'boot_index': 0, 'guest_format': None, 'attachment_id': '4921b09b-4c35-4547-87b6-473ec051d364', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-259949', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'name': 'volume-e1a1d343-79bf-455c-8446-09fa8e9f2035', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '69604167-6a61-4723-bf7d-7ba168837839', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'serial': 'e1a1d343-79bf-455c-8446-09fa8e9f2035'}, 'disk_bus': None, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=62204) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 740.746248] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Root volume attach. 
Driver type: vmdk {{(pid=62204) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 740.746960] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59797810-914f-4eb2-9c9d-118bee68a136 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.758791] env[62204]: DEBUG nova.network.neutron [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Successfully updated port: eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 740.758791] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808776eb-c57b-4e60-b07d-2344c8eca111 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.763189] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940fdf32-9793-4404-a76f-c6f4dfa32476 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.771974] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-9789bf99-e831-4a2d-833d-3753a269f7f7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.784168] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 740.784168] env[62204]: value = "task-1199559" [ 740.784168] env[62204]: _type = "Task" [ 740.784168] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.792961] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.932658] env[62204]: DEBUG nova.network.neutron [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Updated VIF entry in instance network info cache for port 70a9a29f-b59c-48d3-8c34-c3bbff8169e0. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 740.933038] env[62204]: DEBUG nova.network.neutron [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Updating instance_info_cache with network_info: [{"id": "70a9a29f-b59c-48d3-8c34-c3bbff8169e0", "address": "fa:16:3e:f9:be:71", "network": {"id": "75ae1e87-ea66-41e5-a2eb-4b54cfdc6a2b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1366063071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d52da8765c4d69b803b4f18d0d9bc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70a9a29f-b5", "ovs_interfaceid": "70a9a29f-b59c-48d3-8c34-c3bbff8169e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.980324] env[62204]: DEBUG nova.compute.utils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 740.982938] env[62204]: DEBUG nova.compute.manager [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 740.983173] env[62204]: DEBUG nova.network.neutron [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 741.023229] env[62204]: DEBUG nova.policy [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce88cf098105490b947338839c5b183f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91bb4988d6ac4f998a79ed9ae1995c90', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 741.092529] env[62204]: DEBUG nova.compute.manager [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Received event network-vif-plugged-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 741.092790] env[62204]: DEBUG oslo_concurrency.lockutils [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] Acquiring lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.092971] env[62204]: DEBUG oslo_concurrency.lockutils [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] Lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.093329] env[62204]: DEBUG oslo_concurrency.lockutils [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] Lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.093557] env[62204]: DEBUG nova.compute.manager [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] No waiting events found dispatching network-vif-plugged-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 741.093745] env[62204]: WARNING nova.compute.manager [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Received unexpected event network-vif-plugged-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 for instance with vm_state building and task_state spawning. 
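The WARNING at the end of the run above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") comes from the external-event path visible in these entries: each incoming event is serialized under a per-instance "<uuid>-events" lock, a registered waiter is popped if one exists, and otherwise the event is reported as unexpected. The sketch below is a minimal, hypothetical illustration of that lock-and-pop pattern, not Nova's actual implementation: SimpleInstanceEvents, prepare_for_event and handle_external_event are invented names, and the only real API assumed is oslo_concurrency.lockutils.lock.

    # Illustrative sketch only -- not Nova's code. It mimics the pattern shown in the
    # log: external events are serialized with a per-instance "<uuid>-events" lock,
    # a registered waiter is popped if present, otherwise the event is "unexpected".
    import logging
    import threading

    from oslo_concurrency import lockutils

    LOG = logging.getLogger(__name__)


    class SimpleInstanceEvents(object):
        """Track threads waiting for external events, keyed by instance UUID."""

        def __init__(self):
            # {instance_uuid: {event_name: threading.Event}}
            self._waiters = {}

        def prepare_for_event(self, instance_uuid, event_name):
            """Register interest in an event before triggering the external action."""
            waiter = threading.Event()
            with lockutils.lock(instance_uuid + '-events'):
                self._waiters.setdefault(instance_uuid, {})[event_name] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            """Return and remove the waiter for this event, or None if nobody waits."""
            with lockutils.lock(instance_uuid + '-events'):
                return self._waiters.get(instance_uuid, {}).pop(event_name, None)


    def handle_external_event(events, instance_uuid, event_name):
        """Dispatch one event: wake a registered waiter, or warn that none exists."""
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is not None:
            waiter.set()  # unblock whoever called prepare_for_event()
        else:
            LOG.warning('Received unexpected event %s for instance %s',
                        event_name, instance_uuid)


    if __name__ == '__main__':
        logging.basicConfig(level=logging.INFO)
        events = SimpleInstanceEvents()
        # No waiter was registered, so this takes the "unexpected event" branch,
        # matching the WARNING lines emitted by nova-compute above.
        handle_external_event(events,
                              '51c9e353-f2cf-41b4-b37e-1cfd5dca0518',
                              'network-vif-plugged-eaf8a6d2')

Running the module as-is exercises the no-waiter branch, which corresponds to the WARNING lines above; in nova-compute the waiter branch is what lets a spawning thread block until Neutron confirms the VIF is plugged.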
[ 741.093907] env[62204]: DEBUG nova.compute.manager [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Received event network-changed-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 741.094077] env[62204]: DEBUG nova.compute.manager [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Refreshing instance network info cache due to event network-changed-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 741.094280] env[62204]: DEBUG oslo_concurrency.lockutils [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] Acquiring lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.094416] env[62204]: DEBUG oslo_concurrency.lockutils [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] Acquired lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.094573] env[62204]: DEBUG nova.network.neutron [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Refreshing network info cache for port eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 741.169624] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.192865] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199558, 'name': Rename_Task, 'duration_secs': 0.162585} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.193184] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.193453] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-971c0b36-a259-4c1e-9273-aa1cfbd56c1c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.200345] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 741.200345] env[62204]: value = "task-1199560" [ 741.200345] env[62204]: _type = "Task" [ 741.200345] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.209180] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199560, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.261220] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.295235] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 42%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.296401] env[62204]: DEBUG nova.network.neutron [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Successfully created port: 10a18bfc-ebd6-4f8a-af35-34114768d9be {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.435842] env[62204]: DEBUG oslo_concurrency.lockutils [req-fe4f5cbd-e4e6-4ce7-a4c4-52b13996dc59 req-e533721d-41f2-4f1b-bfaf-fad9949e5fba service nova] Releasing lock "refresh_cache-69604167-6a61-4723-bf7d-7ba168837839" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.484257] env[62204]: DEBUG nova.compute.manager [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 741.659732] env[62204]: DEBUG nova.network.neutron [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 741.710693] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199560, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.803263] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 54%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.816901] env[62204]: DEBUG nova.network.neutron [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.945721] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e08dcbe-ce63-4002-a5e6-cb0b0f2ba9b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.955560] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5d6fa5-1df1-4b61-8c27-d907e6f3b172 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.990945] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3240df8f-e6c4-46ce-9d48-420e3284faed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.003724] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae9296f-5dfb-49d5-bc98-5598f6a883c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.018509] env[62204]: DEBUG nova.compute.provider_tree [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.212538] env[62204]: DEBUG oslo_vmware.api [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199560, 'name': PowerOnVM_Task, 'duration_secs': 0.554495} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.213056] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 742.213437] env[62204]: INFO nova.compute.manager [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Took 7.27 seconds to spawn the instance on the hypervisor. [ 742.213811] env[62204]: DEBUG nova.compute.manager [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 742.214803] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2567bab8-a59e-4d6a-9cfe-cbf8b4098666 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.298267] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 67%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.321059] env[62204]: DEBUG oslo_concurrency.lockutils [req-ce465e0c-b891-4f76-9644-e9b157676687 req-be7c3081-0481-4647-87c8-9c489831dd87 service nova] Releasing lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.321059] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquired lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.321059] env[62204]: DEBUG nova.network.neutron [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 742.500250] env[62204]: DEBUG nova.compute.manager [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 742.521264] env[62204]: DEBUG nova.scheduler.client.report [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 742.527025] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 742.527025] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 742.527025] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.527448] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 742.527448] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.527448] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 742.527753] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 742.527799] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 742.527969] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 742.528374] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 742.528374] env[62204]: DEBUG nova.virt.hardware [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 742.529207] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53332d3c-d726-49de-84c8-9c00559d2ffd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.538271] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941daf46-710a-4871-9c2d-24a76fb8c19f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.738875] env[62204]: INFO nova.compute.manager [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Took 29.11 seconds to build instance. [ 742.800139] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 82%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.805332] env[62204]: DEBUG nova.compute.manager [req-8b17abe3-9590-41ea-bcf0-e5c5988f9c0f req-b85d36b2-0c23-437a-a6f6-840492d3199d service nova] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Received event network-vif-plugged-10a18bfc-ebd6-4f8a-af35-34114768d9be {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 742.805332] env[62204]: DEBUG oslo_concurrency.lockutils [req-8b17abe3-9590-41ea-bcf0-e5c5988f9c0f req-b85d36b2-0c23-437a-a6f6-840492d3199d service nova] Acquiring lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.805332] env[62204]: DEBUG oslo_concurrency.lockutils [req-8b17abe3-9590-41ea-bcf0-e5c5988f9c0f req-b85d36b2-0c23-437a-a6f6-840492d3199d service nova] Lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 742.805565] env[62204]: DEBUG oslo_concurrency.lockutils [req-8b17abe3-9590-41ea-bcf0-e5c5988f9c0f req-b85d36b2-0c23-437a-a6f6-840492d3199d service nova] Lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.805886] env[62204]: DEBUG nova.compute.manager [req-8b17abe3-9590-41ea-bcf0-e5c5988f9c0f req-b85d36b2-0c23-437a-a6f6-840492d3199d service nova] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] No waiting events found dispatching network-vif-plugged-10a18bfc-ebd6-4f8a-af35-34114768d9be {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 742.806293] env[62204]: WARNING nova.compute.manager [req-8b17abe3-9590-41ea-bcf0-e5c5988f9c0f req-b85d36b2-0c23-437a-a6f6-840492d3199d service nova] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Received unexpected event network-vif-plugged-10a18bfc-ebd6-4f8a-af35-34114768d9be for instance with vm_state building and task_state spawning. [ 742.862205] env[62204]: DEBUG nova.network.neutron [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 742.867855] env[62204]: DEBUG nova.network.neutron [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Successfully updated port: 10a18bfc-ebd6-4f8a-af35-34114768d9be {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 743.033748] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.034356] env[62204]: DEBUG nova.compute.manager [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 743.038418] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.575s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.039908] env[62204]: INFO nova.compute.claims [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.144459] env[62204]: DEBUG nova.network.neutron [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Updating instance_info_cache with network_info: [{"id": "eaf8a6d2-9ed4-4008-9072-23d2ac93fc16", "address": "fa:16:3e:37:f2:59", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8a6d2-9e", "ovs_interfaceid": "eaf8a6d2-9ed4-4008-9072-23d2ac93fc16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.240931] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3212e179-bd62-4843-9720-8dda40d37dd9 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 137.259s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.297778] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 95%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.371524] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquiring lock "refresh_cache-eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 743.371627] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquired lock "refresh_cache-eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.371784] env[62204]: DEBUG nova.network.neutron [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 743.551902] env[62204]: DEBUG nova.compute.utils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 743.553584] env[62204]: DEBUG nova.compute.manager [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 743.553782] env[62204]: DEBUG nova.network.neutron [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 743.593601] env[62204]: DEBUG nova.policy [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '817e2eaba86149789ff8d3ff69f15489', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8108a8f6b5e04832aab188333bad1e0e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 743.650261] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Releasing lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.650541] env[62204]: DEBUG nova.compute.manager [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Instance network_info: |[{"id": "eaf8a6d2-9ed4-4008-9072-23d2ac93fc16", "address": "fa:16:3e:37:f2:59", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8a6d2-9e", "ovs_interfaceid": "eaf8a6d2-9ed4-4008-9072-23d2ac93fc16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 743.650956] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:f2:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd098b1c-636f-492d-b5ae-037cb0cae454', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaf8a6d2-9ed4-4008-9072-23d2ac93fc16', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 743.658602] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Creating folder: Project (286b300e98e244eb8693bb0f3174c121). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 743.659286] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be3a8952-9430-4326-88f3-d11ac1c085be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.670102] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Created folder: Project (286b300e98e244eb8693bb0f3174c121) in parent group-v259933. [ 743.670299] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Creating folder: Instances. Parent ref: group-v259986. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 743.670523] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8bb85369-62f1-4f55-bcee-f6a2205b1768 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.680195] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Created folder: Instances in parent group-v259986. [ 743.680344] env[62204]: DEBUG oslo.service.loopingcall [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.680539] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 743.680764] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5449112-c49b-46be-9c47-699d732571b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.699985] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 743.699985] env[62204]: value = "task-1199563" [ 743.699985] env[62204]: _type = "Task" [ 743.699985] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.708044] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199563, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.744768] env[62204]: DEBUG nova.compute.manager [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 743.800167] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 97%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.946877] env[62204]: DEBUG nova.network.neutron [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 744.043255] env[62204]: DEBUG nova.network.neutron [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Successfully created port: 52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.059242] env[62204]: DEBUG nova.compute.manager [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 744.178666] env[62204]: DEBUG nova.network.neutron [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Updating instance_info_cache with network_info: [{"id": "10a18bfc-ebd6-4f8a-af35-34114768d9be", "address": "fa:16:3e:d7:38:09", "network": {"id": "546fe35f-97b7-40e8-b71e-9ad8b53f726e", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1513772706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91bb4988d6ac4f998a79ed9ae1995c90", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10a18bfc-eb", "ovs_interfaceid": "10a18bfc-ebd6-4f8a-af35-34114768d9be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.213172] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199563, 'name': CreateVM_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.276774] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.300085] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 97%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.554442] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29d0b5b-2211-440b-9689-81d5c9354026 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.568175] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8aeee9-a96d-42f2-80fe-ba8adc429b78 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.607652] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2573e01-e705-4de1-b808-e52b98ce6a16 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.616349] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d453ec-c813-41bb-bffa-64966d601f5c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.636845] env[62204]: DEBUG nova.compute.provider_tree [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.682204] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Releasing lock "refresh_cache-eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 744.682530] env[62204]: DEBUG nova.compute.manager [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Instance network_info: |[{"id": "10a18bfc-ebd6-4f8a-af35-34114768d9be", "address": "fa:16:3e:d7:38:09", "network": {"id": "546fe35f-97b7-40e8-b71e-9ad8b53f726e", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1513772706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91bb4988d6ac4f998a79ed9ae1995c90", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10a18bfc-eb", "ovs_interfaceid": "10a18bfc-ebd6-4f8a-af35-34114768d9be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 744.683432] env[62204]: DEBUG 
nova.virt.vmwareapi.vmops [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:38:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10a18bfc-ebd6-4f8a-af35-34114768d9be', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 744.691230] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Creating folder: Project (91bb4988d6ac4f998a79ed9ae1995c90). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 744.691536] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e1b38cf9-263b-47aa-8042-1883e838bf58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.702787] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Created folder: Project (91bb4988d6ac4f998a79ed9ae1995c90) in parent group-v259933. [ 744.702971] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Creating folder: Instances. Parent ref: group-v259989. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 744.707786] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37d66966-c8a2-4719-b6d3-2df0e4183ee2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.714590] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199563, 'name': CreateVM_Task, 'duration_secs': 0.670796} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.716099] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 744.716754] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Created folder: Instances in parent group-v259989. [ 744.717037] env[62204]: DEBUG oslo.service.loopingcall [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 744.717649] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 744.717801] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.718120] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 744.718341] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 744.718526] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-710b2cbd-a0e2-4ce9-9ec4-747b9d522712 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.720051] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b9cf70e-91bc-4ccf-86bb-c2e573b021f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.481788] env[62204]: DEBUG nova.compute.manager [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 745.484401] env[62204]: DEBUG nova.scheduler.client.report [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.488673] env[62204]: DEBUG nova.compute.manager [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Received event network-changed-10a18bfc-ebd6-4f8a-af35-34114768d9be {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 745.488849] env[62204]: DEBUG nova.compute.manager [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Refreshing instance network info cache due to event network-changed-10a18bfc-ebd6-4f8a-af35-34114768d9be. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 745.489066] env[62204]: DEBUG oslo_concurrency.lockutils [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] Acquiring lock "refresh_cache-eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.489208] env[62204]: DEBUG oslo_concurrency.lockutils [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] Acquired lock "refresh_cache-eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.489364] env[62204]: DEBUG nova.network.neutron [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Refreshing network info cache for port 10a18bfc-ebd6-4f8a-af35-34114768d9be {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 745.496286] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 745.496286] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526a1046-c2a0-c676-9ddf-bbc2695a0476" [ 745.496286] env[62204]: _type = "Task" [ 745.496286] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.503828] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 745.503828] env[62204]: value = "task-1199566" [ 745.503828] env[62204]: _type = "Task" [ 745.503828] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.504025] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 98%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.512692] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526a1046-c2a0-c676-9ddf-bbc2695a0476, 'name': SearchDatastore_Task, 'duration_secs': 0.00952} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.514401] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 745.514401] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 745.514502] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.514618] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.514887] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 745.515326] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ae409ae-c94f-47f7-bae3-0a496f3f1e46 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.521228] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199566, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.524103] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 745.524435] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 745.524541] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.524721] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 745.524937] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.525016] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 745.525239] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 745.525402] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 745.525561] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 745.525711] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 745.525875] env[62204]: DEBUG nova.virt.hardware [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 745.526891] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791bd1f9-b0ff-40e4-951e-ae912d3d4f9f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.530464] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 745.530637] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 745.531645] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ee9297e-93be-4d1e-9421-96ade671b2f1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.537783] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094cd218-1407-4726-841a-0d60a05de520 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.542761] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 745.542761] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5228b31d-b2d1-207f-46b4-f85dd83133fa" [ 745.542761] env[62204]: _type = "Task" [ 745.542761] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.558107] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5228b31d-b2d1-207f-46b4-f85dd83133fa, 'name': SearchDatastore_Task, 'duration_secs': 0.009722} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.558892] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ece237c-7ef9-40fc-830c-e55c543ca0d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.564420] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 745.564420] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524a2860-f023-0a06-ad05-6f12e20025bc" [ 745.564420] env[62204]: _type = "Task" [ 745.564420] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.572337] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524a2860-f023-0a06-ad05-6f12e20025bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.869219] env[62204]: DEBUG nova.compute.manager [req-db67ed88-9852-45a0-b328-09ec80f578c5 req-f772702a-bc19-4a45-8904-093fcd8282c8 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received event network-vif-plugged-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 745.869219] env[62204]: DEBUG oslo_concurrency.lockutils [req-db67ed88-9852-45a0-b328-09ec80f578c5 req-f772702a-bc19-4a45-8904-093fcd8282c8 service nova] Acquiring lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 745.869219] env[62204]: DEBUG oslo_concurrency.lockutils [req-db67ed88-9852-45a0-b328-09ec80f578c5 req-f772702a-bc19-4a45-8904-093fcd8282c8 service nova] Lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.869219] env[62204]: DEBUG oslo_concurrency.lockutils [req-db67ed88-9852-45a0-b328-09ec80f578c5 req-f772702a-bc19-4a45-8904-093fcd8282c8 service nova] Lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.869598] env[62204]: DEBUG nova.compute.manager [req-db67ed88-9852-45a0-b328-09ec80f578c5 req-f772702a-bc19-4a45-8904-093fcd8282c8 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] No waiting events found dispatching network-vif-plugged-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 745.869949] env[62204]: WARNING nova.compute.manager [req-db67ed88-9852-45a0-b328-09ec80f578c5 req-f772702a-bc19-4a45-8904-093fcd8282c8 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received unexpected event 
network-vif-plugged-52d592a0-434a-4f17-8db6-39bf5d505429 for instance with vm_state building and task_state spawning. [ 745.994080] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.953s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.994080] env[62204]: DEBUG nova.compute.manager [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 745.995135] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task} progress is 98%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.995830] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.320s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 745.996538] env[62204]: DEBUG nova.objects.instance [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lazy-loading 'resources' on Instance uuid 48fe8f43-4ab9-41de-9b81-35b4438585ea {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 746.021493] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199566, 'name': CreateVM_Task, 'duration_secs': 0.340132} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.022252] env[62204]: DEBUG nova.network.neutron [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Successfully updated port: 52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 746.024323] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 746.026535] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.026535] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.026696] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 746.027794] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07fae681-10fa-4008-a97a-a605e512e363 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.035274] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 746.035274] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a2e996-64e4-234e-f156-a097687d86c1" [ 746.035274] env[62204]: _type = "Task" [ 746.035274] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.047872] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a2e996-64e4-234e-f156-a097687d86c1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.078023] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524a2860-f023-0a06-ad05-6f12e20025bc, 'name': SearchDatastore_Task, 'duration_secs': 0.011948} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.078023] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.078023] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 51c9e353-f2cf-41b4-b37e-1cfd5dca0518/51c9e353-f2cf-41b4-b37e-1cfd5dca0518.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 746.078023] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a0bfc52-758b-4b4a-8e43-dc6a8f855203 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.084332] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 746.084332] env[62204]: value = "task-1199567" [ 746.084332] env[62204]: _type = "Task" [ 746.084332] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.092233] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199567, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.246306] env[62204]: DEBUG nova.network.neutron [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Updated VIF entry in instance network info cache for port 10a18bfc-ebd6-4f8a-af35-34114768d9be. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 746.246787] env[62204]: DEBUG nova.network.neutron [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Updating instance_info_cache with network_info: [{"id": "10a18bfc-ebd6-4f8a-af35-34114768d9be", "address": "fa:16:3e:d7:38:09", "network": {"id": "546fe35f-97b7-40e8-b71e-9ad8b53f726e", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1513772706-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91bb4988d6ac4f998a79ed9ae1995c90", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10a18bfc-eb", "ovs_interfaceid": "10a18bfc-ebd6-4f8a-af35-34114768d9be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.491881] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199559, 'name': RelocateVM_Task, 'duration_secs': 5.278457} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.492265] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Volume attach. 
Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 746.492509] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-259949', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'name': 'volume-e1a1d343-79bf-455c-8446-09fa8e9f2035', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '69604167-6a61-4723-bf7d-7ba168837839', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'serial': 'e1a1d343-79bf-455c-8446-09fa8e9f2035'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 746.493444] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58b146a-dc92-41bc-8e29-018ad1e5be97 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.499148] env[62204]: DEBUG nova.compute.utils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.499148] env[62204]: DEBUG nova.compute.manager [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 746.499404] env[62204]: DEBUG nova.network.neutron [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 746.520920] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be2c29f-bc7b-4dcb-90af-7d195259b697 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.525687] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.525848] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.526029] env[62204]: DEBUG nova.network.neutron [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 746.545880] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] volume-e1a1d343-79bf-455c-8446-09fa8e9f2035/volume-e1a1d343-79bf-455c-8446-09fa8e9f2035.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 746.553570] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f8cb9a5-f193-4329-8388-df9a9a3eb938 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.568894] env[62204]: DEBUG nova.policy [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d744cd3a45e4a6da81f4b09a62836d7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64447421900e4b709582ce52efcb7a68', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 746.577623] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 
tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a2e996-64e4-234e-f156-a097687d86c1, 'name': SearchDatastore_Task, 'duration_secs': 0.011684} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.582272] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.582500] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 746.582735] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.582880] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.583097] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.583660] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 746.583660] env[62204]: value = "task-1199568" [ 746.583660] env[62204]: _type = "Task" [ 746.583660] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.584051] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a08f1e24-3d05-4095-897e-b8288ab98875 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.603744] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199568, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.604345] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199567, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455403} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.605432] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 51c9e353-f2cf-41b4-b37e-1cfd5dca0518/51c9e353-f2cf-41b4-b37e-1cfd5dca0518.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 746.605655] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 746.605908] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.606066] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 746.606760] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-101f5af2-07fb-4f98-ba0f-8182a38c038f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.608559] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57b85d26-d078-4c10-8464-17f6330e69c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.617037] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 746.617037] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5250f4a8-2e16-e04a-6a30-a332e3efd3f6" [ 746.617037] env[62204]: _type = "Task" [ 746.617037] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.619289] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 746.619289] env[62204]: value = "task-1199569" [ 746.619289] env[62204]: _type = "Task" [ 746.619289] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.714063] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5250f4a8-2e16-e04a-6a30-a332e3efd3f6, 'name': SearchDatastore_Task, 'duration_secs': 0.012489} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.714063] env[62204]: DEBUG nova.network.neutron [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 746.714063] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199569, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.714063] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0699c7aa-0e91-454e-a0ef-a041759962a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.714063] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 746.714063] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5214bb5b-bd21-f642-2995-93cb10c63c38" [ 746.714063] env[62204]: _type = "Task" [ 746.714063] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.714063] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5214bb5b-bd21-f642-2995-93cb10c63c38, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.749757] env[62204]: DEBUG oslo_concurrency.lockutils [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] Releasing lock "refresh_cache-eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.749890] env[62204]: DEBUG nova.compute.manager [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-changed-c81069ae-b3b9-4b0d-902f-ed9a2e24542f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 746.750091] env[62204]: DEBUG nova.compute.manager [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Refreshing instance network info cache due to event network-changed-c81069ae-b3b9-4b0d-902f-ed9a2e24542f. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 746.750298] env[62204]: DEBUG oslo_concurrency.lockutils [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] Acquiring lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.750450] env[62204]: DEBUG oslo_concurrency.lockutils [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] Acquired lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.750615] env[62204]: DEBUG nova.network.neutron [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Refreshing network info cache for port c81069ae-b3b9-4b0d-902f-ed9a2e24542f {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 746.849799] env[62204]: DEBUG nova.network.neutron [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating instance_info_cache with network_info: [{"id": "52d592a0-434a-4f17-8db6-39bf5d505429", "address": "fa:16:3e:98:f8:77", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d592a0-43", "ovs_interfaceid": "52d592a0-434a-4f17-8db6-39bf5d505429", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.918611] env[62204]: DEBUG nova.network.neutron [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Successfully created port: efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.004130] env[62204]: DEBUG nova.compute.manager [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 747.085124] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f673b9-8f74-4f59-aceb-e9eefcaf581d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.098734] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5e992a-e131-4e15-98d6-b7e0649f88b5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.102143] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199568, 'name': ReconfigVM_Task, 'duration_secs': 0.281004} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.102426] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Reconfigured VM instance instance-00000031 to attach disk [datastore2] volume-e1a1d343-79bf-455c-8446-09fa8e9f2035/volume-e1a1d343-79bf-455c-8446-09fa8e9f2035.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 747.107206] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b94b036b-a8c3-4d76-8f5e-37f09093f87e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.146892] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f079fdc-8ff3-46e7-8027-2a87b16e8371 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.150336] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 747.150336] env[62204]: value = "task-1199570" [ 747.150336] env[62204]: _type = "Task" [ 747.150336] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.162071] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199569, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079778} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.163690] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 747.164903] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698306ba-39ef-4fb7-b000-6f69b2436564 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.177385] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fc3216-e9b6-4a22-8851-171662bfbfd0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.180231] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5214bb5b-bd21-f642-2995-93cb10c63c38, 'name': SearchDatastore_Task, 'duration_secs': 0.011942} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.181221] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.181578] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948/eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 747.182200] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c99352fc-8f25-4597-9ff6-c35c60adc4b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.204542] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199570, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.214920] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 51c9e353-f2cf-41b4-b37e-1cfd5dca0518/51c9e353-f2cf-41b4-b37e-1cfd5dca0518.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 747.214920] env[62204]: DEBUG nova.compute.provider_tree [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.216282] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a57f8afc-c8d4-4657-9ad6-c00f00f05d04 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.232238] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 747.232238] env[62204]: value = "task-1199571" [ 747.232238] env[62204]: _type = "Task" [ 747.232238] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.237938] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 747.237938] env[62204]: value = "task-1199572" [ 747.237938] env[62204]: _type = "Task" [ 747.237938] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.244276] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199571, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.248498] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199572, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.355319] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.355635] env[62204]: DEBUG nova.compute.manager [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Instance network_info: |[{"id": "52d592a0-434a-4f17-8db6-39bf5d505429", "address": "fa:16:3e:98:f8:77", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d592a0-43", "ovs_interfaceid": "52d592a0-434a-4f17-8db6-39bf5d505429", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 747.356161] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:f8:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52d592a0-434a-4f17-8db6-39bf5d505429', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.363355] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating folder: Project (8108a8f6b5e04832aab188333bad1e0e). Parent ref: group-v259933. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.363641] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfc04ec9-0e5e-4cc5-9bb1-d445ed70c000 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.373075] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Created folder: Project (8108a8f6b5e04832aab188333bad1e0e) in parent group-v259933. [ 747.373291] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating folder: Instances. Parent ref: group-v259992. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.375492] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c0a303d-a5ee-4bba-bad0-388d18c9f2e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.383699] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Created folder: Instances in parent group-v259992. [ 747.383898] env[62204]: DEBUG oslo.service.loopingcall [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 747.384114] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.384308] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4647bba2-f08c-48da-b94d-63d4ac158ee9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.404152] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.404152] env[62204]: value = "task-1199575" [ 747.404152] env[62204]: _type = "Task" [ 747.404152] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.416229] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199575, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.550130] env[62204]: DEBUG nova.network.neutron [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updated VIF entry in instance network info cache for port c81069ae-b3b9-4b0d-902f-ed9a2e24542f. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 747.550600] env[62204]: DEBUG nova.network.neutron [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.665796] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199570, 'name': ReconfigVM_Task, 'duration_secs': 0.294576} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.666167] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-259949', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'name': 'volume-e1a1d343-79bf-455c-8446-09fa8e9f2035', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '69604167-6a61-4723-bf7d-7ba168837839', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'serial': 'e1a1d343-79bf-455c-8446-09fa8e9f2035'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 747.666781] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9909a45c-1aa8-4c3d-8c71-ba9249843a03 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.676053] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 747.676053] env[62204]: value = "task-1199576" [ 747.676053] env[62204]: _type = "Task" [ 747.676053] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.688258] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199576, 'name': Rename_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.732966] env[62204]: DEBUG nova.scheduler.client.report [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.750885] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199571, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.751327] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199572, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.906702] env[62204]: DEBUG nova.compute.manager [req-533b97c4-09f2-44a9-bbe8-ee12859e3745 req-b4c42531-b80b-445c-96e5-fd1211b4f219 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received event network-changed-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 747.906967] env[62204]: DEBUG nova.compute.manager [req-533b97c4-09f2-44a9-bbe8-ee12859e3745 req-b4c42531-b80b-445c-96e5-fd1211b4f219 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Refreshing instance network info cache due to event network-changed-52d592a0-434a-4f17-8db6-39bf5d505429. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 747.907643] env[62204]: DEBUG oslo_concurrency.lockutils [req-533b97c4-09f2-44a9-bbe8-ee12859e3745 req-b4c42531-b80b-445c-96e5-fd1211b4f219 service nova] Acquiring lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.907643] env[62204]: DEBUG oslo_concurrency.lockutils [req-533b97c4-09f2-44a9-bbe8-ee12859e3745 req-b4c42531-b80b-445c-96e5-fd1211b4f219 service nova] Acquired lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.907643] env[62204]: DEBUG nova.network.neutron [req-533b97c4-09f2-44a9-bbe8-ee12859e3745 req-b4c42531-b80b-445c-96e5-fd1211b4f219 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Refreshing network info cache for port 52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 747.920727] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199575, 'name': CreateVM_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.021155] env[62204]: DEBUG nova.compute.manager [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 748.045836] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=<?>,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-08T23:34:16Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 748.046186] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 748.046366] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.046696] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 748.046746] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.046898] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 748.047254] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 748.047373] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 748.047589] env[62204]: DEBUG nova.virt.hardware [None
req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 748.047846] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 748.047933] env[62204]: DEBUG nova.virt.hardware [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 748.049295] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c470224-9f91-453e-9c52-5f81a972f9a9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.054307] env[62204]: DEBUG oslo_concurrency.lockutils [req-9bd9301c-0482-4fd9-9e35-285289ee7a73 req-69fc0eda-0234-42ff-9a9f-434f5bba6840 service nova] Releasing lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.058255] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eba528c-c483-4098-8960-1b44a127ac26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.186752] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199576, 'name': Rename_Task, 'duration_secs': 0.290416} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.187104] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 748.187534] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d256d4c-f5ed-42b7-87d2-7de7c2f19845 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.194594] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 748.194594] env[62204]: value = "task-1199577" [ 748.194594] env[62204]: _type = "Task" [ 748.194594] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.203258] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199577, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.247501] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.252s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.254775] env[62204]: DEBUG oslo_concurrency.lockutils [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.361s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.254775] env[62204]: DEBUG nova.objects.instance [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lazy-loading 'resources' on Instance uuid 19326d9f-5f3a-4756-874f-d4d3ce25f8e8 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 748.255208] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199571, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576648} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.255439] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948/eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 748.255537] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 748.255755] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-00e01a00-fae0-4671-a2b6-676d84a8c617 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.261532] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199572, 'name': ReconfigVM_Task, 'duration_secs': 0.706033} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.262249] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 51c9e353-f2cf-41b4-b37e-1cfd5dca0518/51c9e353-f2cf-41b4-b37e-1cfd5dca0518.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 748.263501] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e3db2f7-7041-4c4f-97a3-1e5ca92e653b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.267904] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 748.267904] env[62204]: value = "task-1199578" [ 748.267904] env[62204]: _type = "Task" [ 748.267904] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.275263] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 748.275263] env[62204]: value = "task-1199579" [ 748.275263] env[62204]: _type = "Task" [ 748.275263] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.283859] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199578, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.287610] env[62204]: INFO nova.scheduler.client.report [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Deleted allocations for instance 48fe8f43-4ab9-41de-9b81-35b4438585ea [ 748.289091] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199579, 'name': Rename_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.419132] env[62204]: DEBUG nova.compute.manager [req-d92161a9-1e62-46c0-b3b1-eb1987343aa0 req-3856c0f9-6620-4f8a-8ace-21903e39f732 service nova] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Received event network-vif-plugged-efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 748.419355] env[62204]: DEBUG oslo_concurrency.lockutils [req-d92161a9-1e62-46c0-b3b1-eb1987343aa0 req-3856c0f9-6620-4f8a-8ace-21903e39f732 service nova] Acquiring lock "55d1649c-5eff-4264-bce1-dd907f9531f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.419564] env[62204]: DEBUG oslo_concurrency.lockutils [req-d92161a9-1e62-46c0-b3b1-eb1987343aa0 req-3856c0f9-6620-4f8a-8ace-21903e39f732 service nova] Lock "55d1649c-5eff-4264-bce1-dd907f9531f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.419725] env[62204]: DEBUG oslo_concurrency.lockutils [req-d92161a9-1e62-46c0-b3b1-eb1987343aa0 req-3856c0f9-6620-4f8a-8ace-21903e39f732 service nova] Lock "55d1649c-5eff-4264-bce1-dd907f9531f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.419926] env[62204]: DEBUG nova.compute.manager [req-d92161a9-1e62-46c0-b3b1-eb1987343aa0 req-3856c0f9-6620-4f8a-8ace-21903e39f732 service nova] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] No waiting events found dispatching network-vif-plugged-efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 748.420322] env[62204]: WARNING nova.compute.manager [req-d92161a9-1e62-46c0-b3b1-eb1987343aa0 req-3856c0f9-6620-4f8a-8ace-21903e39f732 service nova] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Received unexpected event network-vif-plugged-efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6 for instance with vm_state building and task_state spawning. [ 748.425608] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199575, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.516174] env[62204]: DEBUG nova.network.neutron [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Successfully updated port: efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 748.700592] env[62204]: DEBUG nova.network.neutron [req-533b97c4-09f2-44a9-bbe8-ee12859e3745 req-b4c42531-b80b-445c-96e5-fd1211b4f219 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updated VIF entry in instance network info cache for port 52d592a0-434a-4f17-8db6-39bf5d505429. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 748.701016] env[62204]: DEBUG nova.network.neutron [req-533b97c4-09f2-44a9-bbe8-ee12859e3745 req-b4c42531-b80b-445c-96e5-fd1211b4f219 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating instance_info_cache with network_info: [{"id": "52d592a0-434a-4f17-8db6-39bf5d505429", "address": "fa:16:3e:98:f8:77", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d592a0-43", "ovs_interfaceid": "52d592a0-434a-4f17-8db6-39bf5d505429", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.708861] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199577, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.777663] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199578, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203342} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.781132] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 748.785530] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae62934-1d11-49dc-92a4-38341c398426 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.795924] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199579, 'name': Rename_Task, 'duration_secs': 0.372275} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.806394] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 748.815049] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948/eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 748.818284] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5a984a81-b519-4b07-9656-0f9b71c94b92 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "48fe8f43-4ab9-41de-9b81-35b4438585ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.406s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.819271] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03c7e5fe-0211-420c-af17-8570157b3ee4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.820901] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fe26476-8f4c-4a9d-862e-7f46b932b9a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.844483] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 748.844483] env[62204]: value = "task-1199580" [ 748.844483] env[62204]: _type = "Task" [ 748.844483] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.846829] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 748.846829] env[62204]: value = "task-1199581" [ 748.846829] env[62204]: _type = "Task" [ 748.846829] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.866447] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.866731] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199580, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.921637] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199575, 'name': CreateVM_Task, 'duration_secs': 1.179804} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.921761] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 748.922525] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.922690] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.922995] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 748.924025] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d2a67fe-f5c9-4c3a-975a-e47dc52fa643 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.928200] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 
748.928200] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f89574-ee2e-a260-6ace-a9bc4bb8c2d9" [ 748.928200] env[62204]: _type = "Task" [ 748.928200] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.938210] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f89574-ee2e-a260-6ace-a9bc4bb8c2d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.021954] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquiring lock "refresh_cache-55d1649c-5eff-4264-bce1-dd907f9531f2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.022218] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquired lock "refresh_cache-55d1649c-5eff-4264-bce1-dd907f9531f2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.022851] env[62204]: DEBUG nova.network.neutron [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 749.207425] env[62204]: DEBUG oslo_concurrency.lockutils [req-533b97c4-09f2-44a9-bbe8-ee12859e3745 req-b4c42531-b80b-445c-96e5-fd1211b4f219 service nova] Releasing lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.207862] env[62204]: DEBUG oslo_vmware.api [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199577, 'name': PowerOnVM_Task, 'duration_secs': 1.001292} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.208032] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 749.209042] env[62204]: INFO nova.compute.manager [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Took 10.00 seconds to spawn the instance on the hypervisor. 
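The recurring "Waiting for the task … progress is N% … completed successfully" records above come from polling vCenter task objects until they reach a terminal state (the wait_for_task/_poll_task call sites at oslo_vmware/api.py:397/434/444 referenced in the records). Below is a minimal, self-contained sketch of that poll-until-done pattern; the get_task_info() helper and the task dictionary shape are illustrative assumptions, not the oslo.vmware implementation.

```python
# Illustrative poll-until-done loop for vCenter-style tasks.
# Assumption: get_task_info(task_id) returns a dict like
# {"state": "running"|"success"|"error", "progress": int, "error": str|None}.
# This mirrors the "progress is N%" / "completed successfully" records above,
# but it is NOT the oslo.vmware implementation.
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""


def wait_for_task(task_id, get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise TaskFailed(info.get("error") or "task %s failed" % task_id)
        # Still running: report progress and poll again, as the records above do.
        print("Task %s progress is %s%%" % (task_id, info.get("progress", 0)))
        time.sleep(poll_interval)
    raise TimeoutError("task %s did not complete within %ss" % (task_id, timeout))


if __name__ == "__main__":
    # Fake task source for demonstration: reports 0%, 50%, then success.
    states = iter([
        {"state": "running", "progress": 0},
        {"state": "running", "progress": 50},
        {"state": "success", "progress": 100},
    ])
    print(wait_for_task("task-1199578", lambda _id: next(states), poll_interval=0.01))
```

In the live service this loop lives inside oslo.vmware's API session rather than in driver code; the records above are the per-poll progress lines and the final "completed successfully" line it emits.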
[ 749.209042] env[62204]: DEBUG nova.compute.manager [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 749.209236] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b698f1-4bec-461f-abb1-495d1421c855 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.251084] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3486d886-54b8-4b3d-bcba-fd4216ad3706 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.260212] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c040138-4ada-4e0e-b0cb-9df3572bac31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.293919] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb2771d-4e48-442f-9860-c2ccf1a65c45 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.302960] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11b44d7-f33b-48fe-aa79-d42376f91410 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.316362] env[62204]: DEBUG nova.compute.provider_tree [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.362156] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199580, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.364968] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199581, 'name': ReconfigVM_Task, 'duration_secs': 0.488297} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.365408] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Reconfigured VM instance instance-00000033 to attach disk [datastore2] eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948/eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 749.366078] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-35eabbfa-2e74-4913-930a-8e2c2f052d68 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.373265] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 749.373265] env[62204]: value = "task-1199582" [ 749.373265] env[62204]: _type = "Task" [ 749.373265] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.386431] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199582, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.442063] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f89574-ee2e-a260-6ace-a9bc4bb8c2d9, 'name': SearchDatastore_Task, 'duration_secs': 0.028435} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.442470] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.442793] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.443090] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 749.443291] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.443534] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.443875] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c29c87cf-883f-49ee-94f9-921be27ef375 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.456574] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.456574] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 749.456574] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2de995ed-c14f-4d81-aba9-725238651309 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.463031] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 749.463031] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52efb7fd-c6da-23e6-81e0-b8591187eb14" [ 749.463031] env[62204]: _type = "Task" [ 749.463031] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.471406] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52efb7fd-c6da-23e6-81e0-b8591187eb14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.583223] env[62204]: DEBUG nova.network.neutron [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.737190] env[62204]: INFO nova.compute.manager [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Took 32.99 seconds to build instance. 
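The "Acquiring lock … acquired … released … waited/held N.NNNs" records that bracket the image-cache and refresh_cache operations above follow oslo.concurrency's named-lock pattern. A small sketch of that usage is below; the lock name and the guarded work are illustrative, and the timings it prints are its own, not values from this log.

```python
# Minimal sketch of the named-lock pattern behind the
# "Acquiring lock ... acquired ... released" records above.
# The lock name and the guarded work are illustrative assumptions.
import time

from oslo_concurrency import lockutils


def refresh_instance_cache(instance_uuid):
    """Serialize cache refreshes per instance, like the records above."""
    lock_name = "refresh_cache-%s" % instance_uuid
    t0 = time.monotonic()
    # lockutils.lock() is a context manager; only one holder per name at a time.
    with lockutils.lock(lock_name):
        t1 = time.monotonic()
        print("lock %s: waited %.3fs" % (lock_name, t1 - t0))
        # ... rebuild the network info cache here ...
        time.sleep(0.01)  # stand-in for real work
    print("lock %s: held %.3fs" % (lock_name, time.monotonic() - t1))


if __name__ == "__main__":
    refresh_instance_cache("55d1649c-5eff-4264-bce1-dd907f9531f2")
```

The separate "waited" and "held" figures in the log correspond to the two intervals measured in the sketch: time spent blocking on the lock versus time spent inside it.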
[ 749.819950] env[62204]: DEBUG nova.scheduler.client.report [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 749.853975] env[62204]: DEBUG nova.network.neutron [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Updating instance_info_cache with network_info: [{"id": "efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6", "address": "fa:16:3e:e4:f1:65", "network": {"id": "e77d2ead-3a12-48b5-8a98-f4278c246ad3", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-698776275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64447421900e4b709582ce52efcb7a68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefd4ced4-49", "ovs_interfaceid": "efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.858431] env[62204]: DEBUG oslo_vmware.api [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199580, 'name': PowerOnVM_Task, 'duration_secs': 0.599629} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.858890] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 749.859103] env[62204]: INFO nova.compute.manager [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Took 9.88 seconds to spawn the instance on the hypervisor. 
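The network_info blobs logged above (for ports 52d592a0-… and efd4ced4-…) are list-of-VIF structures; when reading these logs it is usually enough to pull out the port ID, MAC, and fixed IPs. A short extraction sketch follows, using a trimmed sample whose shape is assumed from the log text rather than taken from Nova's network model objects.

```python
# Pull port id, MAC and fixed IPs out of a network_info entry like the ones
# logged above. The sample data is a trimmed, illustrative copy of that shape,
# not an object from Nova's network model.
def summarize_vif(vif):
    ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
    ]
    return {"port_id": vif["id"], "mac": vif["address"], "fixed_ips": ips}


if __name__ == "__main__":
    sample_network_info = [{
        "id": "efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6",
        "address": "fa:16:3e:e4:f1:65",
        "network": {
            "id": "e77d2ead-3a12-48b5-8a98-f4278c246ad3",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.7", "type": "fixed"}],
            }],
        },
    }]
    for vif in sample_network_info:
        print(summarize_vif(vif))
```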
[ 749.859282] env[62204]: DEBUG nova.compute.manager [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 749.860057] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275c4188-4c00-4df1-ac88-60638b4a4a31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.883874] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199582, 'name': Rename_Task, 'duration_secs': 0.180961} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.884211] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 749.884462] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25f2bc81-a422-47ff-b7d9-b20d588e19f4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.891438] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 749.891438] env[62204]: value = "task-1199583" [ 749.891438] env[62204]: _type = "Task" [ 749.891438] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.903819] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199583, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.957652] env[62204]: DEBUG oslo_concurrency.lockutils [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "432115aa-8999-40fe-a0cb-31433575c912" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.957898] env[62204]: DEBUG oslo_concurrency.lockutils [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "432115aa-8999-40fe-a0cb-31433575c912" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.958295] env[62204]: DEBUG oslo_concurrency.lockutils [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "432115aa-8999-40fe-a0cb-31433575c912-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.958507] env[62204]: DEBUG oslo_concurrency.lockutils [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "432115aa-8999-40fe-a0cb-31433575c912-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.958684] env[62204]: DEBUG oslo_concurrency.lockutils [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "432115aa-8999-40fe-a0cb-31433575c912-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.961047] env[62204]: INFO nova.compute.manager [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Terminating instance [ 749.962888] env[62204]: DEBUG nova.compute.manager [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 749.963123] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.964334] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bed6046-a056-4b40-8e4f-b78b24f32d4c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.977375] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52efb7fd-c6da-23e6-81e0-b8591187eb14, 'name': SearchDatastore_Task, 'duration_secs': 0.011978} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.980024] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 749.980653] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0288bbb-eaa6-4e50-8e18-325f0f47e2b9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.982872] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-692d4f01-b1b0-4340-83e2-9e1abcbf56cf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.986899] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 749.986899] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52086eda-2713-85ad-d43c-4ad9203cfcea" [ 749.986899] env[62204]: _type = "Task" [ 749.986899] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.991280] env[62204]: DEBUG oslo_vmware.api [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 749.991280] env[62204]: value = "task-1199584" [ 749.991280] env[62204]: _type = "Task" [ 749.991280] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.997281] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52086eda-2713-85ad-d43c-4ad9203cfcea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.002028] env[62204]: DEBUG oslo_vmware.api [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.242556] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fda08738-2af5-42a6-8b20-b437e9405d40 tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "69604167-6a61-4723-bf7d-7ba168837839" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.916s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.326795] env[62204]: DEBUG oslo_concurrency.lockutils [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.072s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.328620] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.414s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.329546] env[62204]: DEBUG nova.objects.instance [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lazy-loading 'resources' on Instance uuid 7c21539c-35fa-4f58-beb0-e965ffaf79af {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 750.360376] env[62204]: INFO nova.scheduler.client.report [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Deleted allocations for instance 19326d9f-5f3a-4756-874f-d4d3ce25f8e8 [ 750.361587] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Releasing lock "refresh_cache-55d1649c-5eff-4264-bce1-dd907f9531f2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.361883] env[62204]: DEBUG nova.compute.manager [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Instance network_info: |[{"id": "efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6", "address": "fa:16:3e:e4:f1:65", "network": {"id": "e77d2ead-3a12-48b5-8a98-f4278c246ad3", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-698776275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64447421900e4b709582ce52efcb7a68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefd4ced4-49", "ovs_interfaceid": "efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 750.369607] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:f1:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8e7f6f41-f4eb-4832-a390-730fca1cf717', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.382351] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Creating folder: Project (64447421900e4b709582ce52efcb7a68). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.389119] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dda93b9d-8e86-4863-b4cc-bc7fdf26d3eb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.400115] env[62204]: INFO nova.compute.manager [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Took 33.24 seconds to build instance. [ 750.410024] env[62204]: DEBUG oslo_vmware.api [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199583, 'name': PowerOnVM_Task, 'duration_secs': 0.488475} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.410305] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Created folder: Project (64447421900e4b709582ce52efcb7a68) in parent group-v259933. [ 750.410680] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Creating folder: Instances. Parent ref: group-v259995. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 750.410939] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 750.411187] env[62204]: INFO nova.compute.manager [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Took 7.91 seconds to spawn the instance on the hypervisor. [ 750.411289] env[62204]: DEBUG nova.compute.manager [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 750.411551] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5da454e0-8f44-491a-8cd6-d4d204980844 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.413829] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c298829-a81d-4ab5-87ca-d64fcd8fcf87 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.425982] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Created folder: Instances in parent group-v259995. [ 750.425982] env[62204]: DEBUG oslo.service.loopingcall [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.426158] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.426354] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81b277d2-6b8b-42af-9e54-0e3d3d9cec8d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.448650] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.448650] env[62204]: value = "task-1199587" [ 750.448650] env[62204]: _type = "Task" [ 750.448650] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.457746] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199587, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.505694] env[62204]: DEBUG oslo_vmware.api [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199584, 'name': PowerOffVM_Task, 'duration_secs': 0.207422} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.505996] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52086eda-2713-85ad-d43c-4ad9203cfcea, 'name': SearchDatastore_Task, 'duration_secs': 0.012516} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.506300] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 750.506509] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 750.506808] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.507096] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2727dc46-98ed-435d-89ef-41bc20cda776/2727dc46-98ed-435d-89ef-41bc20cda776.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 750.507427] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e4612e9-a4f1-440f-a01f-5591b199ee29 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.510067] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15b17b04-7131-420a-8def-606245d4c715 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.517131] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 750.517131] env[62204]: value = "task-1199588" [ 750.517131] env[62204]: 
_type = "Task" [ 750.517131] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.526450] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.577163] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.577741] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.577952] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Deleting the datastore file [datastore1] 432115aa-8999-40fe-a0cb-31433575c912 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.578247] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33466e38-e398-47b9-9ab0-0c3e156bce05 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.584706] env[62204]: DEBUG oslo_vmware.api [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for the task: (returnval){ [ 750.584706] env[62204]: value = "task-1199590" [ 750.584706] env[62204]: _type = "Task" [ 750.584706] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.592641] env[62204]: DEBUG oslo_vmware.api [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.595260] env[62204]: DEBUG nova.compute.manager [req-4a5ee634-7f9c-4f72-b31c-17dfab41ad92 req-1780d7c5-19df-4e3d-a8a4-3451de7f5040 service nova] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Received event network-changed-efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 750.595568] env[62204]: DEBUG nova.compute.manager [req-4a5ee634-7f9c-4f72-b31c-17dfab41ad92 req-1780d7c5-19df-4e3d-a8a4-3451de7f5040 service nova] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Refreshing instance network info cache due to event network-changed-efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 750.595631] env[62204]: DEBUG oslo_concurrency.lockutils [req-4a5ee634-7f9c-4f72-b31c-17dfab41ad92 req-1780d7c5-19df-4e3d-a8a4-3451de7f5040 service nova] Acquiring lock "refresh_cache-55d1649c-5eff-4264-bce1-dd907f9531f2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.595777] env[62204]: DEBUG oslo_concurrency.lockutils [req-4a5ee634-7f9c-4f72-b31c-17dfab41ad92 req-1780d7c5-19df-4e3d-a8a4-3451de7f5040 service nova] Acquired lock "refresh_cache-55d1649c-5eff-4264-bce1-dd907f9531f2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.595930] env[62204]: DEBUG nova.network.neutron [req-4a5ee634-7f9c-4f72-b31c-17dfab41ad92 req-1780d7c5-19df-4e3d-a8a4-3451de7f5040 service nova] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Refreshing network info cache for port efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 750.745856] env[62204]: DEBUG nova.compute.manager [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 750.893040] env[62204]: DEBUG oslo_concurrency.lockutils [None req-616af2c4-86ff-4ef6-ba5b-b2d2f236d0eb tempest-ServerMetadataNegativeTestJSON-1285377913 tempest-ServerMetadataNegativeTestJSON-1285377913-project-member] Lock "19326d9f-5f3a-4756-874f-d4d3ce25f8e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.915s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.903813] env[62204]: DEBUG oslo_concurrency.lockutils [None req-27ffcc16-05d8-4414-9a87-5e87eac695e6 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.919s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.939132] env[62204]: INFO nova.compute.manager [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Took 31.46 seconds to build instance. [ 750.964353] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199587, 'name': CreateVM_Task, 'duration_secs': 0.371738} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.964538] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 750.965269] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.965440] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.965833] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 750.966477] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46ece2df-1ec9-4ced-ab5f-3f50b80e1b0c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.975920] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 750.975920] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eef8a8-0c19-13f7-00fb-f823e493ebde" [ 750.975920] env[62204]: _type = "Task" [ 750.975920] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.985820] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eef8a8-0c19-13f7-00fb-f823e493ebde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.030493] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199588, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.094304] env[62204]: DEBUG oslo_vmware.api [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Task: {'id': task-1199590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265849} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.094847] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 751.094979] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 751.095443] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.095749] env[62204]: INFO nova.compute.manager [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Took 1.13 seconds to destroy the instance on the hypervisor. [ 751.096138] env[62204]: DEBUG oslo.service.loopingcall [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.099756] env[62204]: DEBUG nova.compute.manager [-] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 751.099851] env[62204]: DEBUG nova.network.neutron [-] [instance: 432115aa-8999-40fe-a0cb-31433575c912] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 751.264191] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.287539] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7848c056-5d78-4df1-b196-840b89f7afc5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.295751] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57216bc7-b216-4fc8-b411-2f1331c6b39a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.327558] env[62204]: DEBUG nova.network.neutron [req-4a5ee634-7f9c-4f72-b31c-17dfab41ad92 req-1780d7c5-19df-4e3d-a8a4-3451de7f5040 service nova] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Updated VIF entry in instance network info cache for port efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 751.327892] env[62204]: DEBUG nova.network.neutron [req-4a5ee634-7f9c-4f72-b31c-17dfab41ad92 req-1780d7c5-19df-4e3d-a8a4-3451de7f5040 service nova] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Updating instance_info_cache with network_info: [{"id": "efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6", "address": "fa:16:3e:e4:f1:65", "network": {"id": "e77d2ead-3a12-48b5-8a98-f4278c246ad3", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-698776275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64447421900e4b709582ce52efcb7a68", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8e7f6f41-f4eb-4832-a390-730fca1cf717", "external-id": "nsx-vlan-transportzone-724", "segmentation_id": 724, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefd4ced4-49", "ovs_interfaceid": "efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.330191] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58724bd2-30c5-4118-8c14-fd74bf35a6aa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.338498] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b78468-d807-430c-ae67-e366f723bc94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.355107] env[62204]: DEBUG nova.compute.provider_tree [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.408381] env[62204]: DEBUG nova.compute.manager [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 751.445895] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3fb6ab94-74f2-4dd0-ae08-4cc2f92c43d2 tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.828s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.487425] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eef8a8-0c19-13f7-00fb-f823e493ebde, 'name': SearchDatastore_Task, 'duration_secs': 0.027634} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.487700] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.488187] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.488425] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.488569] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.488737] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.488988] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a014713a-0e29-4e56-9976-d060592c3488 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.506330] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 
tempest-ServerAddressesTestJSON-766158484-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.506795] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.507525] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0fe93a1-f017-4abf-9b68-5bccd6c98b41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.515377] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 751.515377] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52723438-65d5-f42d-d622-dfceab999714" [ 751.515377] env[62204]: _type = "Task" [ 751.515377] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.527459] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52723438-65d5-f42d-d622-dfceab999714, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.531300] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199588, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603699} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.531684] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2727dc46-98ed-435d-89ef-41bc20cda776/2727dc46-98ed-435d-89ef-41bc20cda776.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 751.531900] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.532168] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-729fb720-dd90-454a-8924-57b2894ad0b5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.539035] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 751.539035] env[62204]: value = "task-1199591" [ 751.539035] env[62204]: _type = "Task" [ 751.539035] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.547918] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199591, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.834413] env[62204]: DEBUG oslo_concurrency.lockutils [req-4a5ee634-7f9c-4f72-b31c-17dfab41ad92 req-1780d7c5-19df-4e3d-a8a4-3451de7f5040 service nova] Releasing lock "refresh_cache-55d1649c-5eff-4264-bce1-dd907f9531f2" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.859909] env[62204]: DEBUG nova.scheduler.client.report [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 751.935561] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.948889] env[62204]: DEBUG nova.compute.manager [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 752.027367] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52723438-65d5-f42d-d622-dfceab999714, 'name': SearchDatastore_Task, 'duration_secs': 0.044661} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.027982] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de6b2183-b8b6-4fda-bfcb-7045077c263b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.034605] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 752.034605] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5281a4f0-79dd-529d-d3dd-8d3f00251392" [ 752.034605] env[62204]: _type = "Task" [ 752.034605] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.043114] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5281a4f0-79dd-529d-d3dd-8d3f00251392, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.052061] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068461} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.052443] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 752.053289] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28912665-330f-4ee4-ab3e-4114b46152c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.077819] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 2727dc46-98ed-435d-89ef-41bc20cda776/2727dc46-98ed-435d-89ef-41bc20cda776.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.078194] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-720ee491-56cb-4ddd-ae34-c3f1a65e6967 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.098520] env[62204]: DEBUG nova.compute.manager [req-1d2a0453-0154-4b3b-888d-737ca7718629 req-09244440-6d02-42ec-817e-c8997e1cbd79 service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Received event network-changed-70a9a29f-b59c-48d3-8c34-c3bbff8169e0 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 752.098818] env[62204]: DEBUG nova.compute.manager [req-1d2a0453-0154-4b3b-888d-737ca7718629 req-09244440-6d02-42ec-817e-c8997e1cbd79 service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Refreshing instance network info cache due to event network-changed-70a9a29f-b59c-48d3-8c34-c3bbff8169e0. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 752.099186] env[62204]: DEBUG oslo_concurrency.lockutils [req-1d2a0453-0154-4b3b-888d-737ca7718629 req-09244440-6d02-42ec-817e-c8997e1cbd79 service nova] Acquiring lock "refresh_cache-69604167-6a61-4723-bf7d-7ba168837839" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.099369] env[62204]: DEBUG oslo_concurrency.lockutils [req-1d2a0453-0154-4b3b-888d-737ca7718629 req-09244440-6d02-42ec-817e-c8997e1cbd79 service nova] Acquired lock "refresh_cache-69604167-6a61-4723-bf7d-7ba168837839" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.099591] env[62204]: DEBUG nova.network.neutron [req-1d2a0453-0154-4b3b-888d-737ca7718629 req-09244440-6d02-42ec-817e-c8997e1cbd79 service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Refreshing network info cache for port 70a9a29f-b59c-48d3-8c34-c3bbff8169e0 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 752.107243] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 752.107243] env[62204]: value = "task-1199592" [ 752.107243] env[62204]: _type = "Task" [ 752.107243] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.117205] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199592, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.210065] env[62204]: DEBUG nova.network.neutron [-] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.367960] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.035s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.367960] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 20.924s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.367960] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.367960] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62204) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 752.367960] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.792s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.369912] env[62204]: INFO nova.compute.claims [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.373246] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eacb478-7f72-4fa5-89b5-b129730b6ffe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.381917] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267bb8c0-0b86-42e6-aa3b-b85e9eb45836 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.391713] env[62204]: INFO nova.scheduler.client.report [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Deleted allocations for instance 7c21539c-35fa-4f58-beb0-e965ffaf79af [ 752.405339] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8f2fb4-92ad-44ff-bf73-e42641c0cc28 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.412824] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84452dd6-ce1f-4941-bcf8-93f199f7c503 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.443408] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181245MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62204) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 752.443408] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.479840] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.545636] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5281a4f0-79dd-529d-d3dd-8d3f00251392, 'name': SearchDatastore_Task, 'duration_secs': 0.012948} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.545923] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.546308] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 55d1649c-5eff-4264-bce1-dd907f9531f2/55d1649c-5eff-4264-bce1-dd907f9531f2.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 752.546673] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea03f9ba-94b3-48aa-9db2-32aca2f3f257 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.554480] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 752.554480] env[62204]: value = "task-1199593" [ 752.554480] env[62204]: _type = "Task" [ 752.554480] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.567446] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.621433] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199592, 'name': ReconfigVM_Task, 'duration_secs': 0.483069} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.621433] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 2727dc46-98ed-435d-89ef-41bc20cda776/2727dc46-98ed-435d-89ef-41bc20cda776.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.621552] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf1dca68-87f4-45f6-99c0-a871949cdb2d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.628036] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 752.628036] env[62204]: value = "task-1199594" [ 752.628036] env[62204]: _type = "Task" [ 752.628036] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.638342] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199594, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.713666] env[62204]: INFO nova.compute.manager [-] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Took 1.61 seconds to deallocate network for instance. [ 752.918523] env[62204]: DEBUG oslo_concurrency.lockutils [None req-219300fd-2a0b-4548-9e0a-8c0a7edf5042 tempest-ServerShowV247Test-1079756337 tempest-ServerShowV247Test-1079756337-project-member] Lock "7c21539c-35fa-4f58-beb0-e965ffaf79af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.244s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.074743] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199593, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.077068] env[62204]: DEBUG nova.network.neutron [req-1d2a0453-0154-4b3b-888d-737ca7718629 req-09244440-6d02-42ec-817e-c8997e1cbd79 service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Updated VIF entry in instance network info cache for port 70a9a29f-b59c-48d3-8c34-c3bbff8169e0. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 753.077514] env[62204]: DEBUG nova.network.neutron [req-1d2a0453-0154-4b3b-888d-737ca7718629 req-09244440-6d02-42ec-817e-c8997e1cbd79 service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Updating instance_info_cache with network_info: [{"id": "70a9a29f-b59c-48d3-8c34-c3bbff8169e0", "address": "fa:16:3e:f9:be:71", "network": {"id": "75ae1e87-ea66-41e5-a2eb-4b54cfdc6a2b", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-1366063071-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53d52da8765c4d69b803b4f18d0d9bc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d0c6fd7-3cc9-4818-9475-8f15900394cc", "external-id": "nsx-vlan-transportzone-317", "segmentation_id": 317, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70a9a29f-b5", "ovs_interfaceid": "70a9a29f-b59c-48d3-8c34-c3bbff8169e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.095830] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquiring lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.096291] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.096583] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquiring lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.096813] env[62204]: DEBUG oslo_concurrency.lockutils [None 
req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 753.097237] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.099393] env[62204]: INFO nova.compute.manager [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Terminating instance [ 753.103164] env[62204]: DEBUG nova.compute.manager [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 753.103372] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 753.106341] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdba1e14-94b3-40a6-8431-b70ef5db7c5d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.118567] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 753.118776] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bbb0310-9f6f-4a04-a6f8-38d9363faab6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.127035] env[62204]: DEBUG oslo_vmware.api [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 753.127035] env[62204]: value = "task-1199595" [ 753.127035] env[62204]: _type = "Task" [ 753.127035] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.140973] env[62204]: DEBUG oslo_vmware.api [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199595, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.144506] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199594, 'name': Rename_Task, 'duration_secs': 0.159535} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.145818] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 753.145818] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e277099-420c-45f7-9d85-4f1c00cf2470 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.152485] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 753.152485] env[62204]: value = "task-1199596" [ 753.152485] env[62204]: _type = "Task" [ 753.152485] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.165221] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199596, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.220480] env[62204]: DEBUG oslo_concurrency.lockutils [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.569419] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199593, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.611692} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.569758] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 55d1649c-5eff-4264-bce1-dd907f9531f2/55d1649c-5eff-4264-bce1-dd907f9531f2.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.570045] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.570360] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aae22d6f-8ccd-4bc4-92ac-73499f233f82 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.580065] env[62204]: DEBUG oslo_concurrency.lockutils [req-1d2a0453-0154-4b3b-888d-737ca7718629 req-09244440-6d02-42ec-817e-c8997e1cbd79 service nova] Releasing lock "refresh_cache-69604167-6a61-4723-bf7d-7ba168837839" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.583494] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 753.583494] env[62204]: value = "task-1199597" [ 753.583494] env[62204]: _type = "Task" [ 753.583494] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.600516] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199597, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.641745] env[62204]: DEBUG oslo_vmware.api [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199595, 'name': PowerOffVM_Task, 'duration_secs': 0.293674} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.642246] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 753.642601] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 753.642972] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71dff98c-69a7-4059-b607-bf0ac3c27997 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.663469] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199596, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.724448] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 753.728779] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 753.728779] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Deleting the datastore file [datastore2] eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.728779] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6cb87bc9-6081-4dd6-b31c-05e8d67fd919 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.731506] env[62204]: DEBUG oslo_vmware.api [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for the task: (returnval){ [ 753.731506] env[62204]: value = "task-1199599" [ 753.731506] env[62204]: _type = "Task" [ 753.731506] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.741476] env[62204]: DEBUG oslo_vmware.api [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199599, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.796192] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf39d2f7-d68a-4eab-9a58-02c6e182ce5c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.803881] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eed3a09-7f07-4ffe-9b08-bebcf5e2b432 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.836156] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d46bb3f-a365-493a-aff3-695efea11773 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.844159] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e795c6ec-b5be-447b-b5a5-098993e6fbe0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.859558] env[62204]: DEBUG nova.compute.provider_tree [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.094024] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199597, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07466} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.094530] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.095500] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5baaf3-1fd5-40c2-805f-6795977a02a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.120234] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 55d1649c-5eff-4264-bce1-dd907f9531f2/55d1649c-5eff-4264-bce1-dd907f9531f2.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.120480] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1ee94b8-de4c-4544-8625-df9f24706b8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.140727] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 754.140727] env[62204]: value = "task-1199600" [ 754.140727] env[62204]: _type = "Task" [ 754.140727] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.150132] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199600, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.162906] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199596, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.179163] env[62204]: DEBUG nova.compute.manager [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Received event network-vif-deleted-b93d9c4c-717c-4679-bcb8-b49b9517e2b8 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 754.179337] env[62204]: DEBUG nova.compute.manager [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Received event network-changed-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 754.179512] env[62204]: DEBUG nova.compute.manager [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Refreshing instance network info cache due to event network-changed-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 754.179732] env[62204]: DEBUG oslo_concurrency.lockutils [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] Acquiring lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.179870] env[62204]: DEBUG oslo_concurrency.lockutils [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] Acquired lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.180038] env[62204]: DEBUG nova.network.neutron [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Refreshing network info cache for port eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 754.240742] env[62204]: DEBUG oslo_vmware.api [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Task: {'id': task-1199599, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290285} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.240995] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 754.241197] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 754.241371] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 754.241551] env[62204]: INFO nova.compute.manager [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Took 1.14 seconds to destroy the instance on the hypervisor. [ 754.241776] env[62204]: DEBUG oslo.service.loopingcall [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.241955] env[62204]: DEBUG nova.compute.manager [-] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 754.242058] env[62204]: DEBUG nova.network.neutron [-] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 754.362774] env[62204]: DEBUG nova.scheduler.client.report [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 754.654874] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199600, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.663427] env[62204]: DEBUG oslo_vmware.api [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199596, 'name': PowerOnVM_Task, 'duration_secs': 1.037153} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.663608] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 754.663823] env[62204]: INFO nova.compute.manager [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Took 9.18 seconds to spawn the instance on the hypervisor. [ 754.663989] env[62204]: DEBUG nova.compute.manager [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 754.664852] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d657032-f6c4-41cb-86b3-16051ffc27f0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.870728] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.504s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.871274] env[62204]: DEBUG nova.compute.manager [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 754.874500] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.258s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.876070] env[62204]: INFO nova.compute.claims [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.105044] env[62204]: DEBUG nova.network.neutron [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Updated VIF entry in instance network info cache for port eaf8a6d2-9ed4-4008-9072-23d2ac93fc16. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 755.105467] env[62204]: DEBUG nova.network.neutron [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Updating instance_info_cache with network_info: [{"id": "eaf8a6d2-9ed4-4008-9072-23d2ac93fc16", "address": "fa:16:3e:37:f2:59", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8a6d2-9e", "ovs_interfaceid": "eaf8a6d2-9ed4-4008-9072-23d2ac93fc16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.152369] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199600, 'name': ReconfigVM_Task, 'duration_secs': 0.96647} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.152687] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 55d1649c-5eff-4264-bce1-dd907f9531f2/55d1649c-5eff-4264-bce1-dd907f9531f2.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.153374] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8ea4183-56a7-4708-a39d-2cf7d770a0d6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.160593] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 755.160593] env[62204]: value = "task-1199601" [ 755.160593] env[62204]: _type = "Task" [ 755.160593] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.169726] env[62204]: DEBUG nova.network.neutron [-] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.174842] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199601, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.188865] env[62204]: INFO nova.compute.manager [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Took 33.29 seconds to build instance. [ 755.383658] env[62204]: DEBUG nova.compute.utils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 755.385768] env[62204]: DEBUG nova.compute.manager [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 755.386201] env[62204]: DEBUG nova.network.neutron [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 755.497606] env[62204]: DEBUG nova.policy [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '052e8b58b8554c02a492ef696d6057bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56643ee7896c48bf9be3dd1cb1c9fc80', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 755.610743] env[62204]: DEBUG oslo_concurrency.lockutils [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] Releasing lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.610743] env[62204]: DEBUG nova.compute.manager [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Received event network-changed-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 755.610743] env[62204]: DEBUG nova.compute.manager [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Refreshing instance network info cache due to event network-changed-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 755.610743] env[62204]: DEBUG oslo_concurrency.lockutils [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] Acquiring lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 755.610743] env[62204]: DEBUG oslo_concurrency.lockutils [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] Acquired lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.610743] env[62204]: DEBUG nova.network.neutron [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Refreshing network info cache for port eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 755.674512] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199601, 'name': Rename_Task, 'duration_secs': 0.15622} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.674512] env[62204]: INFO nova.compute.manager [-] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Took 1.43 seconds to deallocate network for instance. [ 755.674512] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.675501] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5680ac6d-0a21-49e1-879f-b7d7efc134cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.683387] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 755.683387] env[62204]: value = "task-1199602" [ 755.683387] env[62204]: _type = "Task" [ 755.683387] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.692036] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ef7d60e8-83db-4195-bc92-dd1cea56f6c6 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.328s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.693314] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199602, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.892578] env[62204]: DEBUG nova.compute.manager [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 756.185218] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.195825] env[62204]: DEBUG nova.compute.manager [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 756.205030] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199602, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.356776] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2bf187-9337-4c01-83a9-5507afb20f9a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.364691] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e73778-56a1-4e0b-bbda-900898d32635 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.411083] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97afe0bc-971c-4fb1-bede-4da83e6a7175 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.419642] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175136aa-5837-48c0-a4d6-aaa19d67c541 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.438311] env[62204]: DEBUG nova.compute.provider_tree [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.473126] env[62204]: DEBUG nova.network.neutron [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Updated VIF entry in instance network info cache for port eaf8a6d2-9ed4-4008-9072-23d2ac93fc16. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 756.473126] env[62204]: DEBUG nova.network.neutron [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Updating instance_info_cache with network_info: [{"id": "eaf8a6d2-9ed4-4008-9072-23d2ac93fc16", "address": "fa:16:3e:37:f2:59", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaf8a6d2-9e", "ovs_interfaceid": "eaf8a6d2-9ed4-4008-9072-23d2ac93fc16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.580594] env[62204]: DEBUG nova.network.neutron [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Successfully created port: ac345dde-4672-4c9d-a224-24ebc7900628 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.697720] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199602, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.725145] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 756.737022] env[62204]: DEBUG nova.compute.manager [req-a8e12f03-d4fe-490b-a7da-f0af5c3716b8 req-c25cbb5e-566a-4b2d-88ce-6eb3b270c9b7 service nova] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Received event network-vif-deleted-10a18bfc-ebd6-4f8a-af35-34114768d9be {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 756.916611] env[62204]: DEBUG nova.compute.manager [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 756.942337] env[62204]: DEBUG nova.scheduler.client.report [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 756.972725] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 756.972725] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 756.972725] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.972872] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 756.973243] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.973243] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 756.973898] env[62204]: DEBUG nova.virt.hardware [None 
req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 756.973898] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 756.973898] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 756.973898] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 756.974058] env[62204]: DEBUG nova.virt.hardware [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 756.974906] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8bbf3b-2beb-4afa-94ea-7839bce285cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.978082] env[62204]: DEBUG oslo_concurrency.lockutils [req-36f91f7c-b924-48ba-8988-9d644a9d1ca8 req-6ef75bd0-c496-46ca-b4a7-e276bf6a45bb service nova] Releasing lock "refresh_cache-51c9e353-f2cf-41b4-b37e-1cfd5dca0518" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.984911] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036cfe6c-16bb-494a-a564-11672ac3c982 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.200928] env[62204]: DEBUG oslo_vmware.api [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199602, 'name': PowerOnVM_Task, 'duration_secs': 1.125295} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.201278] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.201494] env[62204]: INFO nova.compute.manager [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Took 9.18 seconds to spawn the instance on the hypervisor. [ 757.201711] env[62204]: DEBUG nova.compute.manager [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 757.202522] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030f2cc6-2b40-4f1a-993c-c841b5214549 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.447687] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.448270] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 757.451108] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.845s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.452729] env[62204]: INFO nova.compute.claims [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.722734] env[62204]: INFO nova.compute.manager [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Took 35.28 seconds to build instance. 
[ 757.959025] env[62204]: DEBUG nova.compute.utils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 757.964360] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 757.964826] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 758.007099] env[62204]: DEBUG nova.policy [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '902a7731e2664ccd8e880e1dd25b5598', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '749474347b5d417197e01fcca204d3d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 758.149105] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.149105] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.226069] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2f6e56a-71e5-4db8-8ca8-aa97232e6b2c tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "55d1649c-5eff-4264-bce1-dd907f9531f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.757s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.294962] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Successfully created port: fc55488d-fc4c-43c1-9d23-fcd59ee87c5b {{(pid=62204) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.465490] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 758.670056] env[62204]: DEBUG nova.network.neutron [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Successfully updated port: ac345dde-4672-4c9d-a224-24ebc7900628 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 758.727975] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 758.818510] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "6dc170a4-b08e-44b5-a152-832670e6866b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.818763] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "6dc170a4-b08e-44b5-a152-832670e6866b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.891800] env[62204]: DEBUG nova.compute.manager [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received event network-changed-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 758.892008] env[62204]: DEBUG nova.compute.manager [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Refreshing instance network info cache due to event network-changed-52d592a0-434a-4f17-8db6-39bf5d505429. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 758.892262] env[62204]: DEBUG oslo_concurrency.lockutils [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] Acquiring lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.892363] env[62204]: DEBUG oslo_concurrency.lockutils [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] Acquired lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.892525] env[62204]: DEBUG nova.network.neutron [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Refreshing network info cache for port 52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 758.911824] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1528fe0d-44f0-4031-98b7-773746394780 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.922510] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1430439-898e-4734-aa59-63a6ff46be73 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.962657] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2614d685-50ec-4065-a4db-13a182b42b5f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.972052] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c5ffef-4c14-4b8f-b9ff-19f4e25eac8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.992333] env[62204]: DEBUG nova.compute.provider_tree [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.172191] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.172372] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.172542] env[62204]: DEBUG nova.network.neutron [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 
4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 759.257603] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.487022] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 759.498023] env[62204]: DEBUG nova.scheduler.client.report [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 759.523529] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 759.524014] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 759.524231] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 759.524480] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 
tempest-MultipleCreateTestJSON-773792924-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 759.524664] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 759.524842] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 759.525096] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 759.525307] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 759.525543] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 759.525719] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 759.525913] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 759.527178] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a989b70-3254-4788-93c0-2bf3afb49995 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.540184] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c27c18-685b-448b-b926-1b304461f9c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.680370] env[62204]: DEBUG oslo_concurrency.lockutils [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquiring lock "55d1649c-5eff-4264-bce1-dd907f9531f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.680919] env[62204]: DEBUG oslo_concurrency.lockutils [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "55d1649c-5eff-4264-bce1-dd907f9531f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.680919] env[62204]: DEBUG oslo_concurrency.lockutils [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquiring lock "55d1649c-5eff-4264-bce1-dd907f9531f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.681096] env[62204]: DEBUG oslo_concurrency.lockutils [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "55d1649c-5eff-4264-bce1-dd907f9531f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.681195] env[62204]: DEBUG oslo_concurrency.lockutils [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "55d1649c-5eff-4264-bce1-dd907f9531f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.683743] env[62204]: INFO nova.compute.manager [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Terminating instance [ 759.687057] env[62204]: DEBUG nova.compute.manager [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 759.687279] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 759.689451] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ac1319-7e0e-4077-8ac9-cb3706e3781e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.700904] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 759.701268] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45d0709d-b954-4f62-9263-9e870ee61ae5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.711114] env[62204]: DEBUG oslo_vmware.api [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 759.711114] env[62204]: value = "task-1199603" [ 759.711114] env[62204]: _type = "Task" [ 759.711114] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.722209] env[62204]: DEBUG oslo_vmware.api [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199603, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.723165] env[62204]: DEBUG nova.network.neutron [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.852533] env[62204]: DEBUG nova.network.neutron [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updated VIF entry in instance network info cache for port 52d592a0-434a-4f17-8db6-39bf5d505429. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 759.852591] env[62204]: DEBUG nova.network.neutron [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating instance_info_cache with network_info: [{"id": "52d592a0-434a-4f17-8db6-39bf5d505429", "address": "fa:16:3e:98:f8:77", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d592a0-43", "ovs_interfaceid": "52d592a0-434a-4f17-8db6-39bf5d505429", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.968457] env[62204]: DEBUG nova.compute.manager [req-980956e6-651b-4258-bc1f-9854fc4d65f9 req-0ad13216-bda3-4e82-94d8-5f0920ca277a service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Received event network-vif-plugged-fc55488d-fc4c-43c1-9d23-fcd59ee87c5b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 759.968665] env[62204]: DEBUG oslo_concurrency.lockutils [req-980956e6-651b-4258-bc1f-9854fc4d65f9 req-0ad13216-bda3-4e82-94d8-5f0920ca277a service nova] Acquiring lock "703bf0c4-9bff-4967-8e84-09969b32b5a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.968892] env[62204]: DEBUG oslo_concurrency.lockutils [req-980956e6-651b-4258-bc1f-9854fc4d65f9 req-0ad13216-bda3-4e82-94d8-5f0920ca277a service nova] Lock "703bf0c4-9bff-4967-8e84-09969b32b5a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.969043] env[62204]: DEBUG oslo_concurrency.lockutils [req-980956e6-651b-4258-bc1f-9854fc4d65f9 req-0ad13216-bda3-4e82-94d8-5f0920ca277a service nova] Lock "703bf0c4-9bff-4967-8e84-09969b32b5a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.969261] env[62204]: DEBUG nova.compute.manager [req-980956e6-651b-4258-bc1f-9854fc4d65f9 req-0ad13216-bda3-4e82-94d8-5f0920ca277a service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] No waiting events found dispatching 
network-vif-plugged-fc55488d-fc4c-43c1-9d23-fcd59ee87c5b {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 759.969439] env[62204]: WARNING nova.compute.manager [req-980956e6-651b-4258-bc1f-9854fc4d65f9 req-0ad13216-bda3-4e82-94d8-5f0920ca277a service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Received unexpected event network-vif-plugged-fc55488d-fc4c-43c1-9d23-fcd59ee87c5b for instance with vm_state building and task_state spawning. [ 759.975593] env[62204]: DEBUG nova.network.neutron [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Updating instance_info_cache with network_info: [{"id": "ac345dde-4672-4c9d-a224-24ebc7900628", "address": "fa:16:3e:41:0f:f2", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac345dde-46", "ovs_interfaceid": "ac345dde-4672-4c9d-a224-24ebc7900628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.011742] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.012500] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 760.016898] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.503s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.017224] env[62204]: DEBUG nova.objects.instance [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lazy-loading 'resources' on Instance uuid 12656a79-a836-452c-8f94-c8e142c9ec2f {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 760.029014] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Successfully updated port: fc55488d-fc4c-43c1-9d23-fcd59ee87c5b {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 760.223482] env[62204]: DEBUG oslo_vmware.api [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199603, 'name': PowerOffVM_Task, 'duration_secs': 0.311023} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.223825] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 760.224057] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 760.224499] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7bea7cb9-5dce-4d2a-ae47-2fcb387766c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.297013] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 760.297362] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 760.297434] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-933558cf-b669-4e1b-8073-1970311d9489 
tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Deleting the datastore file [datastore1] 55d1649c-5eff-4264-bce1-dd907f9531f2 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 760.297709] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70e0d8d0-7451-4475-93fc-6220d8e45e08 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.304934] env[62204]: DEBUG oslo_vmware.api [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for the task: (returnval){ [ 760.304934] env[62204]: value = "task-1199605" [ 760.304934] env[62204]: _type = "Task" [ 760.304934] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.313786] env[62204]: DEBUG oslo_vmware.api [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199605, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.355074] env[62204]: DEBUG oslo_concurrency.lockutils [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] Releasing lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.355503] env[62204]: DEBUG nova.compute.manager [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Received event network-vif-plugged-ac345dde-4672-4c9d-a224-24ebc7900628 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 760.355885] env[62204]: DEBUG oslo_concurrency.lockutils [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] Acquiring lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.356254] env[62204]: DEBUG oslo_concurrency.lockutils [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.356545] env[62204]: DEBUG oslo_concurrency.lockutils [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.356845] env[62204]: DEBUG nova.compute.manager [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] No waiting events found dispatching 
network-vif-plugged-ac345dde-4672-4c9d-a224-24ebc7900628 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 760.357130] env[62204]: WARNING nova.compute.manager [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Received unexpected event network-vif-plugged-ac345dde-4672-4c9d-a224-24ebc7900628 for instance with vm_state building and task_state spawning. [ 760.357424] env[62204]: DEBUG nova.compute.manager [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Received event network-changed-ac345dde-4672-4c9d-a224-24ebc7900628 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 760.357703] env[62204]: DEBUG nova.compute.manager [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Refreshing instance network info cache due to event network-changed-ac345dde-4672-4c9d-a224-24ebc7900628. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 760.357981] env[62204]: DEBUG oslo_concurrency.lockutils [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] Acquiring lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.481240] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Releasing lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.481505] env[62204]: DEBUG nova.compute.manager [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Instance network_info: |[{"id": "ac345dde-4672-4c9d-a224-24ebc7900628", "address": "fa:16:3e:41:0f:f2", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac345dde-46", "ovs_interfaceid": "ac345dde-4672-4c9d-a224-24ebc7900628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 760.481812] env[62204]: DEBUG oslo_concurrency.lockutils 
[req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] Acquired lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.481990] env[62204]: DEBUG nova.network.neutron [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Refreshing network info cache for port ac345dde-4672-4c9d-a224-24ebc7900628 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 760.483285] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:0f:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac345dde-4672-4c9d-a224-24ebc7900628', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.495026] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Creating folder: Project (56643ee7896c48bf9be3dd1cb1c9fc80). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 760.495026] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d8f4ec1-8ba9-4c57-ab89-8a18a54aebc5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.506765] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Created folder: Project (56643ee7896c48bf9be3dd1cb1c9fc80) in parent group-v259933. [ 760.506965] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Creating folder: Instances. Parent ref: group-v259998. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 760.507215] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-caeaaf79-3d25-4504-865b-d8394faca1db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.519587] env[62204]: DEBUG nova.compute.utils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 760.521150] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Created folder: Instances in parent group-v259998. 
[ 760.521435] env[62204]: DEBUG oslo.service.loopingcall [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.524342] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 760.524580] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 760.526323] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 760.526910] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87a25aa6-cf8f-4983-8de5-44eb49c5ca5a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.545691] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "refresh_cache-703bf0c4-9bff-4967-8e84-09969b32b5a1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.545691] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "refresh_cache-703bf0c4-9bff-4967-8e84-09969b32b5a1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.545823] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 760.558985] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.558985] env[62204]: value = "task-1199608" [ 760.558985] env[62204]: _type = "Task" [ 760.558985] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.567144] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199608, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.596540] env[62204]: DEBUG nova.policy [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '902a7731e2664ccd8e880e1dd25b5598', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '749474347b5d417197e01fcca204d3d6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 760.602768] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "25563dec-7e4d-42d9-b922-0b2354b5d70e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.604369] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.818204] env[62204]: DEBUG oslo_vmware.api [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Task: {'id': task-1199605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.420201} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.821634] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 760.821753] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 760.821926] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 760.822113] env[62204]: INFO nova.compute.manager [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 760.822355] env[62204]: DEBUG oslo.service.loopingcall [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.823663] env[62204]: DEBUG nova.compute.manager [-] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 760.823663] env[62204]: DEBUG nova.network.neutron [-] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 760.883446] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Successfully created port: 41f6daea-83a0-45cc-a106-3528a62481e1 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 760.947864] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228fecfe-38a7-4543-a5ec-d352db2ee553 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.956431] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3229d4f0-d60c-4901-95de-8a286ec0ebaa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.991374] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaaf2e74-afb4-4df5-8aef-bc407a5cd887 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.003235] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c55123d-aa3a-4e36-991b-d01e96c73410 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.018196] env[62204]: DEBUG nova.compute.provider_tree [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.022801] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 761.069845] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199608, 'name': CreateVM_Task, 'duration_secs': 0.437299} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.070028] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 761.070739] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.070905] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.071246] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 761.072104] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e548930-0c58-44b8-9697-9d84faf55ab2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.078551] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 761.078551] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52454b72-46ef-89a2-51dd-9ca5fb59457f" [ 761.078551] env[62204]: _type = "Task" [ 761.078551] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.088302] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52454b72-46ef-89a2-51dd-9ca5fb59457f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.132482] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 761.505090] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Updating instance_info_cache with network_info: [{"id": "fc55488d-fc4c-43c1-9d23-fcd59ee87c5b", "address": "fa:16:3e:9a:15:a4", "network": {"id": "29c2c6b3-02eb-4b1a-9351-8baf33c0d9a5", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1012926608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "749474347b5d417197e01fcca204d3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc55488d-fc", "ovs_interfaceid": "fc55488d-fc4c-43c1-9d23-fcd59ee87c5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.518708] env[62204]: DEBUG nova.network.neutron [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Updated VIF entry in instance network info cache for port ac345dde-4672-4c9d-a224-24ebc7900628. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 761.519060] env[62204]: DEBUG nova.network.neutron [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Updating instance_info_cache with network_info: [{"id": "ac345dde-4672-4c9d-a224-24ebc7900628", "address": "fa:16:3e:41:0f:f2", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac345dde-46", "ovs_interfaceid": "ac345dde-4672-4c9d-a224-24ebc7900628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.520944] env[62204]: DEBUG nova.scheduler.client.report [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 761.590140] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52454b72-46ef-89a2-51dd-9ca5fb59457f, 'name': SearchDatastore_Task, 'duration_secs': 0.012463} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.590469] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.590771] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.591037] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.591192] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.591377] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.591658] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0339b837-2a0b-4271-b054-c0f8cab987e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.600874] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.601073] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 761.601803] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6de96c29-a836-486a-a76b-0b0209ee48cf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.607967] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 761.607967] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527623e0-4e28-9848-2c20-1c929a74c857" [ 761.607967] env[62204]: _type = "Task" [ 761.607967] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.616117] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527623e0-4e28-9848-2c20-1c929a74c857, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.758107] env[62204]: DEBUG nova.network.neutron [-] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.005131] env[62204]: DEBUG nova.compute.manager [req-02aa1d12-6338-47a6-b3e8-a07e80a6c339 req-bde63df5-66e5-4dd8-90c4-80cf4483bf17 service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Received event network-changed-fc55488d-fc4c-43c1-9d23-fcd59ee87c5b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 762.005355] env[62204]: DEBUG nova.compute.manager [req-02aa1d12-6338-47a6-b3e8-a07e80a6c339 req-bde63df5-66e5-4dd8-90c4-80cf4483bf17 service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Refreshing instance network info cache due to event network-changed-fc55488d-fc4c-43c1-9d23-fcd59ee87c5b. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 762.005561] env[62204]: DEBUG oslo_concurrency.lockutils [req-02aa1d12-6338-47a6-b3e8-a07e80a6c339 req-bde63df5-66e5-4dd8-90c4-80cf4483bf17 service nova] Acquiring lock "refresh_cache-703bf0c4-9bff-4967-8e84-09969b32b5a1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.007379] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "refresh_cache-703bf0c4-9bff-4967-8e84-09969b32b5a1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.007712] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Instance network_info: |[{"id": "fc55488d-fc4c-43c1-9d23-fcd59ee87c5b", "address": "fa:16:3e:9a:15:a4", "network": {"id": "29c2c6b3-02eb-4b1a-9351-8baf33c0d9a5", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1012926608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "749474347b5d417197e01fcca204d3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc55488d-fc", "ovs_interfaceid": "fc55488d-fc4c-43c1-9d23-fcd59ee87c5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 762.008041] env[62204]: DEBUG oslo_concurrency.lockutils [req-02aa1d12-6338-47a6-b3e8-a07e80a6c339 req-bde63df5-66e5-4dd8-90c4-80cf4483bf17 service nova] Acquired lock "refresh_cache-703bf0c4-9bff-4967-8e84-09969b32b5a1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.008264] env[62204]: DEBUG nova.network.neutron [req-02aa1d12-6338-47a6-b3e8-a07e80a6c339 req-bde63df5-66e5-4dd8-90c4-80cf4483bf17 service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Refreshing network info cache for port fc55488d-fc4c-43c1-9d23-fcd59ee87c5b {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 762.010028] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:15:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99639c37-b0c6-4be7-9594-230e44b1280b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'fc55488d-fc4c-43c1-9d23-fcd59ee87c5b', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 762.017206] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Creating folder: Project (749474347b5d417197e01fcca204d3d6). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.020218] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2e2d19e-2fe3-495c-a43f-f6bdb4c3e220 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.022249] env[62204]: DEBUG oslo_concurrency.lockutils [req-b9a6b14e-25eb-4a66-b986-3adaef5a1da9 req-5bc225cc-9e8f-44a5-80e0-7360f45d45d4 service nova] Releasing lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.025036] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.027071] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.285s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.027325] env[62204]: DEBUG nova.objects.instance [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lazy-loading 'resources' on Instance uuid 258ec37d-c791-4c43-8725-0f4b4bbf9b5b {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 762.030718] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 762.038032] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Created folder: Project (749474347b5d417197e01fcca204d3d6) in parent group-v259933. [ 762.038236] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Creating folder: Instances. Parent ref: group-v260001. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 762.038552] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf0da75c-9d1f-4ddd-b181-cb3623878d5d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.052286] env[62204]: INFO nova.scheduler.client.report [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Deleted allocations for instance 12656a79-a836-452c-8f94-c8e142c9ec2f [ 762.055628] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Created folder: Instances in parent group-v260001. [ 762.055628] env[62204]: DEBUG oslo.service.loopingcall [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 762.057598] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 762.058537] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-233c59e7-2fa0-40be-9b3e-3d5941e7d012 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.074789] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 762.075103] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 762.075319] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.075508] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df 
tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 762.075653] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.075794] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 762.075996] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 762.076167] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 762.076333] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 762.076497] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 762.076664] env[62204]: DEBUG nova.virt.hardware [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 762.077804] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ba471e-7d9d-4a92-959a-396e0d43c43f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.089316] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd723258-c621-4c00-8a87-e91ddde95090 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.094625] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 762.094625] env[62204]: value = "task-1199611" [ 762.094625] env[62204]: _type = "Task" [ 762.094625] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.111225] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199611, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.122749] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527623e0-4e28-9848-2c20-1c929a74c857, 'name': SearchDatastore_Task, 'duration_secs': 0.011328} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.123894] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-becab5a3-12d9-4b33-81a8-2ed363a554f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.132312] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 762.132312] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520178b9-3cf1-ee18-b71c-6e2992b503d6" [ 762.132312] env[62204]: _type = "Task" [ 762.132312] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.141344] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520178b9-3cf1-ee18-b71c-6e2992b503d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.261206] env[62204]: INFO nova.compute.manager [-] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Took 1.44 seconds to deallocate network for instance. [ 762.292457] env[62204]: DEBUG nova.network.neutron [req-02aa1d12-6338-47a6-b3e8-a07e80a6c339 req-bde63df5-66e5-4dd8-90c4-80cf4483bf17 service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Updated VIF entry in instance network info cache for port fc55488d-fc4c-43c1-9d23-fcd59ee87c5b. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 762.292572] env[62204]: DEBUG nova.network.neutron [req-02aa1d12-6338-47a6-b3e8-a07e80a6c339 req-bde63df5-66e5-4dd8-90c4-80cf4483bf17 service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Updating instance_info_cache with network_info: [{"id": "fc55488d-fc4c-43c1-9d23-fcd59ee87c5b", "address": "fa:16:3e:9a:15:a4", "network": {"id": "29c2c6b3-02eb-4b1a-9351-8baf33c0d9a5", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1012926608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "749474347b5d417197e01fcca204d3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc55488d-fc", "ovs_interfaceid": "fc55488d-fc4c-43c1-9d23-fcd59ee87c5b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.561824] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ee5f267-593f-47b7-9742-4663f39bb1d9 tempest-InstanceActionsNegativeTestJSON-643811987 tempest-InstanceActionsNegativeTestJSON-643811987-project-member] Lock "12656a79-a836-452c-8f94-c8e142c9ec2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.638s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 762.607214] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199611, 'name': CreateVM_Task, 'duration_secs': 0.402011} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.607356] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 762.608030] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.608202] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.608554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 762.608756] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da0032e6-ee0d-42ab-bd69-e5badb53e688 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.619420] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 762.619420] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528cbbfc-af8e-d2ce-c572-e87c5d215ee3" [ 762.619420] env[62204]: _type = "Task" [ 762.619420] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.634131] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528cbbfc-af8e-d2ce-c572-e87c5d215ee3, 'name': SearchDatastore_Task, 'duration_secs': 0.009546} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.637996] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.638293] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 762.638502] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.647636] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520178b9-3cf1-ee18-b71c-6e2992b503d6, 'name': SearchDatastore_Task, 'duration_secs': 0.01045} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.648903] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.648903] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f/4793e9fd-be87-4885-8f0e-1fcef6ce4d2f.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 762.648903] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.648903] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 762.648903] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59921e50-4f62-44bb-b960-4e2639b4fc46 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.651203] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9be7d33e-d042-4380-887b-ac33b44e720e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.654770] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Successfully updated port: 41f6daea-83a0-45cc-a106-3528a62481e1 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 762.661802] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 762.661802] env[62204]: value = "task-1199612" [ 762.661802] env[62204]: _type = "Task" [ 762.661802] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.669105] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 762.669105] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 762.670804] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a50325e6-7a5e-4380-b6bc-41986ca42e0d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.676499] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199612, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.680367] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 762.680367] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52699e4d-e51e-f7e9-61e0-4de032997531" [ 762.680367] env[62204]: _type = "Task" [ 762.680367] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.692272] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52699e4d-e51e-f7e9-61e0-4de032997531, 'name': SearchDatastore_Task, 'duration_secs': 0.009336} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.693068] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08796ddb-0d26-447e-a888-e17f63790eb5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.703423] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 762.703423] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52513ba7-6f18-57a0-e291-657933a8013d" [ 762.703423] env[62204]: _type = "Task" [ 762.703423] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.710536] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52513ba7-6f18-57a0-e291-657933a8013d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.770956] env[62204]: DEBUG oslo_concurrency.lockutils [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.795729] env[62204]: DEBUG oslo_concurrency.lockutils [req-02aa1d12-6338-47a6-b3e8-a07e80a6c339 req-bde63df5-66e5-4dd8-90c4-80cf4483bf17 service nova] Releasing lock "refresh_cache-703bf0c4-9bff-4967-8e84-09969b32b5a1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.796154] env[62204]: DEBUG nova.compute.manager [req-02aa1d12-6338-47a6-b3e8-a07e80a6c339 req-bde63df5-66e5-4dd8-90c4-80cf4483bf17 service nova] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Received event network-vif-deleted-efd4ced4-49e8-4f5f-8c9c-ec37a6b773c6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 763.061810] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395b19f3-24c7-4de2-8165-ed58f34075eb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.071746] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c3a72e-6bac-4068-bee2-316f47748814 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.105856] env[62204]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19d9d2bf-a1c2-4743-bec5-685ac6dba3bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.114405] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aace394-b3dd-4f1a-b189-11b448423d84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.128481] env[62204]: DEBUG nova.compute.provider_tree [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.157310] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "refresh_cache-ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.157464] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "refresh_cache-ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.157906] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 763.178095] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199612, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.214247] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52513ba7-6f18-57a0-e291-657933a8013d, 'name': SearchDatastore_Task, 'duration_secs': 0.009278} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.214511] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.214774] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 703bf0c4-9bff-4967-8e84-09969b32b5a1/703bf0c4-9bff-4967-8e84-09969b32b5a1.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 763.215059] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28016eee-3327-435e-8aef-940ac80baea9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.224234] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 763.224234] env[62204]: value = "task-1199613" [ 763.224234] env[62204]: _type = "Task" [ 763.224234] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.233865] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.635263] env[62204]: DEBUG nova.scheduler.client.report [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 763.674475] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199612, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.692144] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 763.735078] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.909530] env[62204]: DEBUG nova.network.neutron [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Updating instance_info_cache with network_info: [{"id": "41f6daea-83a0-45cc-a106-3528a62481e1", "address": "fa:16:3e:d2:af:6b", "network": {"id": "29c2c6b3-02eb-4b1a-9351-8baf33c0d9a5", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1012926608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "749474347b5d417197e01fcca204d3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41f6daea-83", "ovs_interfaceid": "41f6daea-83a0-45cc-a106-3528a62481e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.041775] env[62204]: DEBUG nova.compute.manager [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Received event network-vif-plugged-41f6daea-83a0-45cc-a106-3528a62481e1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 764.042069] env[62204]: DEBUG oslo_concurrency.lockutils [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] Acquiring lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.042325] env[62204]: DEBUG oslo_concurrency.lockutils [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] Lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.042538] env[62204]: DEBUG oslo_concurrency.lockutils [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] Lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.042747] env[62204]: DEBUG nova.compute.manager [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] No waiting events found dispatching network-vif-plugged-41f6daea-83a0-45cc-a106-3528a62481e1 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 764.042951] env[62204]: WARNING nova.compute.manager [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Received unexpected event network-vif-plugged-41f6daea-83a0-45cc-a106-3528a62481e1 for instance with vm_state building and task_state spawning. [ 764.043162] env[62204]: DEBUG nova.compute.manager [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Received event network-changed-41f6daea-83a0-45cc-a106-3528a62481e1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 764.043405] env[62204]: DEBUG nova.compute.manager [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Refreshing instance network info cache due to event network-changed-41f6daea-83a0-45cc-a106-3528a62481e1. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 764.043610] env[62204]: DEBUG oslo_concurrency.lockutils [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] Acquiring lock "refresh_cache-ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.148280] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.121s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.151160] env[62204]: DEBUG oslo_concurrency.lockutils [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.207s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.151565] env[62204]: DEBUG nova.objects.instance [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lazy-loading 'resources' on Instance uuid b0180c2b-8edf-4d15-8d12-c754b73f6030 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 764.177142] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199612, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.481458} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.181020] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f/4793e9fd-be87-4885-8f0e-1fcef6ce4d2f.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.181020] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.181020] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9d6caf5-7680-448a-99e7-ad5e792b69fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.183292] env[62204]: INFO nova.scheduler.client.report [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Deleted allocations for instance 258ec37d-c791-4c43-8725-0f4b4bbf9b5b [ 764.194888] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 764.194888] env[62204]: value = "task-1199614" [ 764.194888] env[62204]: _type = "Task" [ 764.194888] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.204529] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199614, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.235746] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199613, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.413597] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "refresh_cache-ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.413736] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Instance network_info: |[{"id": "41f6daea-83a0-45cc-a106-3528a62481e1", "address": "fa:16:3e:d2:af:6b", "network": {"id": "29c2c6b3-02eb-4b1a-9351-8baf33c0d9a5", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1012926608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "749474347b5d417197e01fcca204d3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41f6daea-83", "ovs_interfaceid": "41f6daea-83a0-45cc-a106-3528a62481e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 764.414052] env[62204]: DEBUG oslo_concurrency.lockutils [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] Acquired lock "refresh_cache-ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.414248] env[62204]: DEBUG nova.network.neutron [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Refreshing network info cache for port 41f6daea-83a0-45cc-a106-3528a62481e1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 764.415976] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:af:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99639c37-b0c6-4be7-9594-230e44b1280b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41f6daea-83a0-45cc-a106-3528a62481e1', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.423577] env[62204]: DEBUG oslo.service.loopingcall [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 
tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.424129] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.424371] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e986f02-9b66-44df-8574-c797491f7a81 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.445897] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.445897] env[62204]: value = "task-1199615" [ 764.445897] env[62204]: _type = "Task" [ 764.445897] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.459411] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199615, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.691537] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a60c2cbc-0324-4383-8a58-84afebea898f tempest-ServerDiagnosticsNegativeTest-1215951225 tempest-ServerDiagnosticsNegativeTest-1215951225-project-member] Lock "258ec37d-c791-4c43-8725-0f4b4bbf9b5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.002s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.710504] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073539} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.710798] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.711657] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6f4058-61b1-41fb-8926-561ec60637be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.739395] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f/4793e9fd-be87-4885-8f0e-1fcef6ce4d2f.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.745432] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c9a405e-ed76-41ff-b2f7-7ba6cbb774b1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.769837] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199613, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.489159} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.772076] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 703bf0c4-9bff-4967-8e84-09969b32b5a1/703bf0c4-9bff-4967-8e84-09969b32b5a1.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.772076] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.772076] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 764.772076] env[62204]: value = "task-1199616" [ 764.772076] env[62204]: _type = "Task" [ 764.772076] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.772076] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e05abf1b-6eaf-4455-9a11-6120dee14105 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.786930] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199616, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.788422] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 764.788422] env[62204]: value = "task-1199617" [ 764.788422] env[62204]: _type = "Task" [ 764.788422] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.797942] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199617, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.961362] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199615, 'name': CreateVM_Task, 'duration_secs': 0.493841} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.961362] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 764.961708] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 764.961708] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.961916] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 764.965321] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fd64b1e-d26b-4bb5-87f7-d2ea1a9cbf00 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.971959] 
env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 764.971959] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f76877-684e-1fc0-e81a-3a15b6d00518" [ 764.971959] env[62204]: _type = "Task" [ 764.971959] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.981558] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f76877-684e-1fc0-e81a-3a15b6d00518, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.143778] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1292ef-fefd-487f-9b7b-5c6cf90cd001 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.153484] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d536b429-d946-4118-a41c-97c109ea6ca9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.190189] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f88e5e-e4a9-4713-80c2-6e3d7cb489d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.201224] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeafff8e-8116-4139-8504-c263fd75a254 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.215871] env[62204]: DEBUG nova.compute.provider_tree [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 765.285178] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199616, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.298069] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199617, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077259} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.298361] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.299167] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03708fa-daf0-4476-b93a-7ca127eac8c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.323694] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 703bf0c4-9bff-4967-8e84-09969b32b5a1/703bf0c4-9bff-4967-8e84-09969b32b5a1.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 765.323861] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f91beaa-ec6f-4f25-96f5-4ed63bbb95f1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.339433] env[62204]: DEBUG nova.network.neutron [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Updated VIF entry in instance network info cache for port 41f6daea-83a0-45cc-a106-3528a62481e1. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 765.339433] env[62204]: DEBUG nova.network.neutron [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Updating instance_info_cache with network_info: [{"id": "41f6daea-83a0-45cc-a106-3528a62481e1", "address": "fa:16:3e:d2:af:6b", "network": {"id": "29c2c6b3-02eb-4b1a-9351-8baf33c0d9a5", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1012926608-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "749474347b5d417197e01fcca204d3d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41f6daea-83", "ovs_interfaceid": "41f6daea-83a0-45cc-a106-3528a62481e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.347189] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 765.347189] env[62204]: value = "task-1199618" [ 765.347189] env[62204]: _type = "Task" [ 765.347189] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.356943] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199618, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.484826] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f76877-684e-1fc0-e81a-3a15b6d00518, 'name': SearchDatastore_Task, 'duration_secs': 0.012933} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.485492] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.485925] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 765.486341] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.486749] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.487053] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 765.487420] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7d10a51-5016-4bc7-b032-5406be543eac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.497856] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 765.498319] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 765.499194] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13ce4824-66d5-47e4-a565-20a110973e56 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.505669] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 765.505669] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b88f56-76f7-2c78-2437-edcbf2a24330" [ 765.505669] env[62204]: _type = "Task" [ 765.505669] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.514976] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b88f56-76f7-2c78-2437-edcbf2a24330, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.723023] env[62204]: DEBUG nova.scheduler.client.report [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.785734] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199616, 'name': ReconfigVM_Task, 'duration_secs': 0.717238} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.785957] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f/4793e9fd-be87-4885-8f0e-1fcef6ce4d2f.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.786607] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-694fde3f-86c0-4f3b-90fa-12b0d35fe040 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.793346] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 765.793346] env[62204]: value = "task-1199619" [ 765.793346] env[62204]: _type = "Task" [ 765.793346] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.801293] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199619, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.841570] env[62204]: DEBUG oslo_concurrency.lockutils [req-650b3354-6656-441c-b170-6a4104354786 req-07f0065b-36b0-4cd1-8ac0-62c06c835276 service nova] Releasing lock "refresh_cache-ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.857949] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199618, 'name': ReconfigVM_Task, 'duration_secs': 0.419164} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.858449] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 703bf0c4-9bff-4967-8e84-09969b32b5a1/703bf0c4-9bff-4967-8e84-09969b32b5a1.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 765.859191] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c632e9f-aa20-4046-b933-3e2acb765369 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.867809] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 765.867809] env[62204]: value = "task-1199620" [ 765.867809] env[62204]: _type = "Task" [ 765.867809] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.876810] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199620, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.018976] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b88f56-76f7-2c78-2437-edcbf2a24330, 'name': SearchDatastore_Task, 'duration_secs': 0.041972} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.020015] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc2cd652-112b-4027-a3e7-6258b811c3f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.026027] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 766.026027] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52600ad8-eff4-acac-4505-33e521e0dc80" [ 766.026027] env[62204]: _type = "Task" [ 766.026027] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.034253] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52600ad8-eff4-acac-4505-33e521e0dc80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.226890] env[62204]: DEBUG oslo_concurrency.lockutils [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.074s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.228032] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.085s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 766.229522] env[62204]: INFO nova.compute.claims [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 766.253794] env[62204]: INFO nova.scheduler.client.report [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Deleted allocations for instance b0180c2b-8edf-4d15-8d12-c754b73f6030 [ 766.303866] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199619, 'name': Rename_Task, 'duration_secs': 0.156169} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.304255] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 766.304507] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2cd0e676-c82b-46cc-9023-43327d1a4d99 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.311815] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 766.311815] env[62204]: value = "task-1199621" [ 766.311815] env[62204]: _type = "Task" [ 766.311815] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.319590] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199621, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.379177] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199620, 'name': Rename_Task, 'duration_secs': 0.187497} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.379573] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 766.379915] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47be91c1-adee-44eb-89f2-323c0e497d32 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.387467] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 766.387467] env[62204]: value = "task-1199622" [ 766.387467] env[62204]: _type = "Task" [ 766.387467] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.397901] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199622, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.544216] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52600ad8-eff4-acac-4505-33e521e0dc80, 'name': SearchDatastore_Task, 'duration_secs': 0.010239} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.544216] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.544216] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702/ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.544216] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d175f940-da51-4a7a-9edd-529146cf7ca1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.552988] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 766.552988] env[62204]: value = "task-1199623" [ 766.552988] env[62204]: _type = "Task" [ 766.552988] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.562483] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199623, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.764163] env[62204]: DEBUG oslo_concurrency.lockutils [None req-967037ad-638c-40ac-a321-b002ab006738 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "b0180c2b-8edf-4d15-8d12-c754b73f6030" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.527s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.829582] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199621, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.909752] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199622, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.065517] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199623, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493971} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.065991] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702/ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 767.066559] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 767.066935] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62cfe11a-237f-45f5-b1bf-9343cd5d808e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.078365] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 767.078365] env[62204]: value = "task-1199624" [ 767.078365] env[62204]: _type = "Task" [ 767.078365] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.086993] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199624, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.328678] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199621, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.401746] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199622, 'name': PowerOnVM_Task, 'duration_secs': 0.553244} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.405121] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.405577] env[62204]: INFO nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Took 7.92 seconds to spawn the instance on the hypervisor. [ 767.405767] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 767.406989] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcaab1c-a236-44fa-b54f-05c3dc081b84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.587980] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199624, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069857} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.588400] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.589220] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f69a79-89be-4e05-aed0-7215351d26b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.615394] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702/ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 767.619360] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac93c827-1c69-43bf-9eda-61968d21d295 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.643942] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 767.643942] env[62204]: value = "task-1199625" [ 767.643942] env[62204]: _type = "Task" [ 767.643942] env[62204]: } to 
complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.650424] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199625, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.743380] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3139c992-d025-4043-b3ba-185e8f62dc29 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.751711] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ddbda7-49a4-4f33-97f5-59b70594a7d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.782945] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c25a5b-071e-4e6b-9584-592ef714b92c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.791215] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb42acc2-bb57-48bc-86f9-768ce6904043 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.806646] env[62204]: DEBUG nova.compute.provider_tree [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.827441] env[62204]: DEBUG oslo_vmware.api [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199621, 'name': PowerOnVM_Task, 'duration_secs': 1.176666} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.827441] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.827441] env[62204]: INFO nova.compute.manager [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Took 10.91 seconds to spawn the instance on the hypervisor. 
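
The entries above trace the same spawn sequence for each instance: the cached image VMDK is located with SearchDatastore_Task, copied with CopyVirtualDisk_Task into the instance folder, the root disk is extended, the VM is reconfigured to attach the disk, renamed, and finally powered on, with every vCenter task polled until completion (the repeated "progress is N%" lines). Below is a minimal sketch of that sequence using oslo.vmware's invoke_api()/wait_for_task() pattern; the helper name, the argument plumbing, and the pre-resolved dc_ref/vm_ref managed-object references are illustrative assumptions rather than nova's actual vm_util code, and the ReconfigVM_Task device-spec step is deliberately omitted.

# Hedged sketch of the copy -> extend -> rename -> power-on steps seen in
# the log above. spawn_disk_and_power_on() is a hypothetical helper; only
# the vSphere method names and the oslo.vmware session calls are real.
def spawn_disk_and_power_on(session, dc_ref, vm_ref, cached_vmdk,
                            instance_vmdk, root_gb, new_name):
    disk_mgr = session.vim.service_content.virtualDiskManager

    # "[datastore1] devstack-image-cache_base/<image>.vmdk" ->
    # "[datastore1] <instance-uuid>/<instance-uuid>.vmdk"
    copy_task = session.invoke_api(session.vim, "CopyVirtualDisk_Task",
                                   disk_mgr,
                                   sourceName=cached_vmdk,
                                   sourceDatacenter=dc_ref,
                                   destName=instance_vmdk,
                                   destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # "Extending root virtual disk to 1048576": the value is in KiB,
    # i.e. root_gb * 1024 * 1024 (1048576 KiB = a 1 GiB root disk).
    extend_task = session.invoke_api(session.vim, "ExtendVirtualDisk_Task",
                                     disk_mgr,
                                     name=instance_vmdk,
                                     datacenter=dc_ref,
                                     newCapacityKb=root_gb * 1024 * 1024,
                                     eagerZero=False)
    session.wait_for_task(extend_task)

    # The ReconfigVM_Task that attaches the copied disk is omitted here;
    # it requires building a VirtualDeviceConfigSpec for the new disk.

    # Rename_Task, then PowerOnVM_Task; wait_for_task() does the polling
    # that produces the "_poll_task ... progress is N%" entries.
    session.wait_for_task(session.invoke_api(session.vim, "Rename_Task",
                                             vm_ref, newName=new_name))
    session.wait_for_task(session.invoke_api(session.vim, "PowerOnVM_Task",
                                             vm_ref))
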
[ 767.827441] env[62204]: DEBUG nova.compute.manager [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 767.828132] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11885df-060c-4c72-9252-4a4fb282e598 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.931101] env[62204]: INFO nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Took 34.34 seconds to build instance. [ 768.153323] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199625, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.310265] env[62204]: DEBUG nova.scheduler.client.report [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 768.351607] env[62204]: INFO nova.compute.manager [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Took 36.80 seconds to build instance. [ 768.433810] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "703bf0c4-9bff-4967-8e84-09969b32b5a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.117s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.661827] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199625, 'name': ReconfigVM_Task, 'duration_secs': 0.884607} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.662062] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Reconfigured VM instance instance-00000038 to attach disk [datastore1] ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702/ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 768.662799] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e40a62e1-7ac9-41b3-92d7-05a63f2dd8bc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.671249] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 768.671249] env[62204]: value = "task-1199626" [ 768.671249] env[62204]: _type = "Task" [ 768.671249] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.680566] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199626, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.816056] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.817412] env[62204]: DEBUG nova.compute.manager [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 768.820833] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.651s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.821118] env[62204]: DEBUG nova.objects.instance [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lazy-loading 'resources' on Instance uuid 186a2de8-2b9e-4c84-8502-cb0ed3b43123 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 768.856638] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c4c66124-398f-46e9-8531-fb30046559a9 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.964s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.940376] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 769.188024] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199626, 'name': Rename_Task, 'duration_secs': 0.301423} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.189049] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 769.189049] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e20f3c6d-1520-4bec-8599-635d47a4fe27 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.198020] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 769.198020] env[62204]: value = "task-1199627" [ 769.198020] env[62204]: _type = "Task" [ 769.198020] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.207093] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199627, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.215647] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "2b728904-19ef-4773-9260-c615da522801" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.215898] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "2b728904-19ef-4773-9260-c615da522801" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.328056] env[62204]: DEBUG nova.compute.utils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 769.332924] env[62204]: DEBUG nova.compute.manager [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 769.333111] env[62204]: DEBUG nova.network.neutron [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 769.362103] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 769.394463] env[62204]: DEBUG nova.policy [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6054f141cad7421f85bbb5944f408070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6907df6f17b142c0b4881f15f3b88a9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 769.463991] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.709810] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199627, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.817776] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.818078] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.836686] env[62204]: DEBUG nova.compute.manager [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 769.897782] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.899781] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65a3006-de8b-496e-9f2a-837cc0bdaed8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.903226] env[62204]: DEBUG nova.network.neutron [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Successfully created port: e4c2acaa-505a-49ac-bc57-647785259723 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.911297] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb27c1a1-7595-4378-9f8f-4cc01a599bfa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.949893] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160a79eb-519a-4a2b-b4de-d29ab75638c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.960244] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dbee6c-4f01-4228-97c3-25df2b93ae9d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.982710] env[62204]: DEBUG nova.compute.provider_tree [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.211823] env[62204]: DEBUG oslo_vmware.api [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199627, 'name': PowerOnVM_Task, 'duration_secs': 0.860364} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.212146] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 770.212383] env[62204]: INFO nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Took 8.18 seconds to spawn the instance on the hypervisor. 
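
Several entries above and below serialize on the "compute_resources" lock (instance_claim vs. update_usage), and the large "waited 27.085s" / "waited 27.651s" figures are time spent blocked on that lock rather than doing work. The sketch below shows the underlying oslo.concurrency pattern; nova's resource tracker actually uses its own synchronized decorator, so this is only an equivalent built on the public lockutils.lock() context manager, with do_claim() as a hypothetical stand-in for the claim logic.

# Minimal sketch of the lock that produces the
# '"compute_resources" acquired/released ... waited N s' messages.
from oslo_concurrency import lockutils

def build_claim(do_claim):
    # A request arriving while another request holds the lock blocks here;
    # the blocked time is what the log reports as "waited 27.085s".
    with lockutils.lock("compute_resources"):
        return do_claim()
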
[ 770.212577] env[62204]: DEBUG nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 770.213677] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df17f552-89d4-44af-8a98-9985bfb47357 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.407623] env[62204]: DEBUG nova.compute.manager [req-afed7e50-fc03-4c6f-9984-cc9f2e08ec27 req-10af9b3e-0107-423e-ad95-f8e08b6b836c service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Received event network-changed-ac345dde-4672-4c9d-a224-24ebc7900628 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 770.407818] env[62204]: DEBUG nova.compute.manager [req-afed7e50-fc03-4c6f-9984-cc9f2e08ec27 req-10af9b3e-0107-423e-ad95-f8e08b6b836c service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Refreshing instance network info cache due to event network-changed-ac345dde-4672-4c9d-a224-24ebc7900628. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 770.409427] env[62204]: DEBUG oslo_concurrency.lockutils [req-afed7e50-fc03-4c6f-9984-cc9f2e08ec27 req-10af9b3e-0107-423e-ad95-f8e08b6b836c service nova] Acquiring lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.409427] env[62204]: DEBUG oslo_concurrency.lockutils [req-afed7e50-fc03-4c6f-9984-cc9f2e08ec27 req-10af9b3e-0107-423e-ad95-f8e08b6b836c service nova] Acquired lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.409427] env[62204]: DEBUG nova.network.neutron [req-afed7e50-fc03-4c6f-9984-cc9f2e08ec27 req-10af9b3e-0107-423e-ad95-f8e08b6b836c service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Refreshing network info cache for port ac345dde-4672-4c9d-a224-24ebc7900628 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 770.485787] env[62204]: DEBUG nova.scheduler.client.report [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 770.739061] env[62204]: INFO nova.compute.manager [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Took 36.15 seconds to build instance. 
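
The "Inventory has not changed" entries repeat the provider's full inventory. A quick way to read those numbers: placement treats usable capacity per resource class as roughly (total - reserved) * allocation_ratio, so this node can overcommit CPU 4x but not memory or disk. The snippet below just reproduces that arithmetic from the values logged for provider 92e8f362-5134-40c6-9a5c-0b8f64197972.

# Usable capacity implied by the inventory dict in the log above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
capacity = {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inventory.items()}
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
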
[ 770.847000] env[62204]: DEBUG nova.compute.manager [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 770.872292] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 770.872292] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 770.872292] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.872521] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 770.873387] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.873387] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 770.873387] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 770.873387] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 
tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 770.873575] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 770.873718] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 770.873900] env[62204]: DEBUG nova.virt.hardware [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 770.874889] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7643292a-fb7f-406f-b8fe-e01c9f6eb55f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.883794] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3f2218-3bfd-4ca9-b011-7f3fa9ff6e6c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.887723] env[62204]: DEBUG nova.compute.manager [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 770.888435] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d9c82e-e20e-44cd-a035-0b270309a60e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.992501] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.995154] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.720s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.997011] env[62204]: INFO nova.compute.claims [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.018650] env[62204]: INFO nova.scheduler.client.report [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Deleted allocations for instance 186a2de8-2b9e-4c84-8502-cb0ed3b43123 [ 771.225798] env[62204]: DEBUG nova.network.neutron [req-afed7e50-fc03-4c6f-9984-cc9f2e08ec27 req-10af9b3e-0107-423e-ad95-f8e08b6b836c service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Updated VIF entry in instance network info cache for port ac345dde-4672-4c9d-a224-24ebc7900628. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 771.226114] env[62204]: DEBUG nova.network.neutron [req-afed7e50-fc03-4c6f-9984-cc9f2e08ec27 req-10af9b3e-0107-423e-ad95-f8e08b6b836c service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Updating instance_info_cache with network_info: [{"id": "ac345dde-4672-4c9d-a224-24ebc7900628", "address": "fa:16:3e:41:0f:f2", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac345dde-46", "ovs_interfaceid": "ac345dde-4672-4c9d-a224-24ebc7900628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.242087] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3324d7c0-15a6-4f55-8a98-43ba9b76e6df tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.889s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.408333] env[62204]: INFO nova.compute.manager [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] instance snapshotting [ 771.408333] env[62204]: DEBUG nova.objects.instance [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'flavor' on Instance uuid 0a4a432d-a71a-4da7-be90-25dcec5a64c6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 771.477987] env[62204]: DEBUG nova.compute.manager [req-f3829a75-e924-4525-94a5-4a1d19b5aec8 req-6dcfbaed-34c2-4d6b-bb13-ba9ec62849e4 
service nova] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Received event network-vif-plugged-e4c2acaa-505a-49ac-bc57-647785259723 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 771.477987] env[62204]: DEBUG oslo_concurrency.lockutils [req-f3829a75-e924-4525-94a5-4a1d19b5aec8 req-6dcfbaed-34c2-4d6b-bb13-ba9ec62849e4 service nova] Acquiring lock "137ce499-6602-46b5-b1eb-b03282c2bab3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.477987] env[62204]: DEBUG oslo_concurrency.lockutils [req-f3829a75-e924-4525-94a5-4a1d19b5aec8 req-6dcfbaed-34c2-4d6b-bb13-ba9ec62849e4 service nova] Lock "137ce499-6602-46b5-b1eb-b03282c2bab3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.477987] env[62204]: DEBUG oslo_concurrency.lockutils [req-f3829a75-e924-4525-94a5-4a1d19b5aec8 req-6dcfbaed-34c2-4d6b-bb13-ba9ec62849e4 service nova] Lock "137ce499-6602-46b5-b1eb-b03282c2bab3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.477987] env[62204]: DEBUG nova.compute.manager [req-f3829a75-e924-4525-94a5-4a1d19b5aec8 req-6dcfbaed-34c2-4d6b-bb13-ba9ec62849e4 service nova] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] No waiting events found dispatching network-vif-plugged-e4c2acaa-505a-49ac-bc57-647785259723 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 771.477987] env[62204]: WARNING nova.compute.manager [req-f3829a75-e924-4525-94a5-4a1d19b5aec8 req-6dcfbaed-34c2-4d6b-bb13-ba9ec62849e4 service nova] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Received unexpected event network-vif-plugged-e4c2acaa-505a-49ac-bc57-647785259723 for instance with vm_state building and task_state spawning. 
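
The WARNING that closes the block above ("Received unexpected event network-vif-plugged-e4c2acaa-...") happens because neutron delivered the vif-plugged notification before the spawning thread registered a waiter for it, so the event dispatcher has nothing to pop and falls through to the warning path. The sketch below is a conceptual illustration of that dispatch logic, not nova's implementation; the names _waiters, prepare_for_event() and pop_instance_event() are invented for the example (the last only echoes the _pop_event lock names seen in the log).

# Conceptual sketch of per-instance event dispatch.
import threading

_waiters = {}  # (instance_uuid, event_name) -> threading.Event

def prepare_for_event(instance_uuid, event_name):
    # A spawning thread would register this *before* plugging the VIF.
    evt = threading.Event()
    _waiters[(instance_uuid, event_name)] = evt
    return evt

def pop_instance_event(instance_uuid, event_name):
    evt = _waiters.pop((instance_uuid, event_name), None)
    if evt is None:
        # No registered waiter -> the "Received unexpected event ..."
        # warning path in the log.
        return False
    evt.set()
    return True
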
[ 771.528722] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f018224a-5af8-4bc0-8fb5-f3b7979bd221 tempest-ImagesNegativeTestJSON-1430580921 tempest-ImagesNegativeTestJSON-1430580921-project-member] Lock "186a2de8-2b9e-4c84-8502-cb0ed3b43123" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.408s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.567178] env[62204]: DEBUG nova.network.neutron [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Successfully updated port: e4c2acaa-505a-49ac-bc57-647785259723 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 771.728580] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "703bf0c4-9bff-4967-8e84-09969b32b5a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.728875] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "703bf0c4-9bff-4967-8e84-09969b32b5a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.729468] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "703bf0c4-9bff-4967-8e84-09969b32b5a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.729468] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "703bf0c4-9bff-4967-8e84-09969b32b5a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.729468] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "703bf0c4-9bff-4967-8e84-09969b32b5a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.731406] env[62204]: DEBUG oslo_concurrency.lockutils [req-afed7e50-fc03-4c6f-9984-cc9f2e08ec27 req-10af9b3e-0107-423e-ad95-f8e08b6b836c service nova] Releasing lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.732007] env[62204]: INFO nova.compute.manager [None 
req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Terminating instance [ 771.734131] env[62204]: DEBUG nova.compute.manager [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 771.734220] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.735043] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ecf49a-f915-4c5d-ad53-8933938fb93d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.744225] env[62204]: DEBUG nova.compute.manager [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 771.746848] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 771.747975] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0bd2818-fd1e-4ca7-8060-134d99c82ec4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.761837] env[62204]: DEBUG oslo_vmware.api [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 771.761837] env[62204]: value = "task-1199628" [ 771.761837] env[62204]: _type = "Task" [ 771.761837] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.775214] env[62204]: DEBUG oslo_vmware.api [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199628, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.798176] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.798729] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.800410] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.800410] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 771.800410] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.802443] env[62204]: INFO nova.compute.manager [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Terminating instance [ 771.804510] env[62204]: DEBUG nova.compute.manager [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 771.804840] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.805985] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942f07bf-2b4b-41cc-b4a8-fcb6ab6ada3b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.815280] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 771.815724] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-813f8db7-1623-40ed-b7c3-0f5fcd27e239 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.823752] env[62204]: DEBUG oslo_vmware.api [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 771.823752] env[62204]: value = "task-1199629" [ 771.823752] env[62204]: _type = "Task" [ 771.823752] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.835527] env[62204]: DEBUG oslo_vmware.api [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199629, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.915021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b1b263-981f-48e3-b33a-f9e507f0ca8a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.945333] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11571f66-c375-4615-9a66-0f472942c9fc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.069273] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-137ce499-6602-46b5-b1eb-b03282c2bab3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.069273] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-137ce499-6602-46b5-b1eb-b03282c2bab3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.069273] env[62204]: DEBUG nova.network.neutron [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 772.260545] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.273210] env[62204]: DEBUG oslo_vmware.api [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199628, 'name': PowerOffVM_Task, 'duration_secs': 0.300319} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.273485] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 772.273652] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 772.273901] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a806ec1-4355-450f-b9ed-89f9bab6dcbf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.333318] env[62204]: DEBUG oslo_vmware.api [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199629, 'name': PowerOffVM_Task, 'duration_secs': 0.243705} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.335733] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 772.335911] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 772.336443] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8566e86c-57f0-4a59-8e35-cc74af2fe6f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.356704] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9000a491-fb44-43e7-99d3-927727b6801b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.365844] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d11a217-58b4-4e8a-9ef9-bdb9791989b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.399019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4c789a-c3b7-4a77-be3c-418a6f8b353a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.407338] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b97a2a-fdfc-4857-8a68-e5077962ade2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.424166] 
env[62204]: DEBUG nova.compute.provider_tree [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.462505] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 772.462505] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-39ddedb4-91f4-4bcf-9438-a4892f7eea6d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.472919] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 772.473248] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 772.473512] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Deleting the datastore file [datastore1] ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 772.474762] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d03e5d2d-ae09-4c4b-9dc3-ffc13a604eea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.476901] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 772.476901] env[62204]: value = "task-1199632" [ 772.476901] env[62204]: _type = "Task" [ 772.476901] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.482116] env[62204]: DEBUG oslo_vmware.api [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 772.482116] env[62204]: value = "task-1199633" [ 772.482116] env[62204]: _type = "Task" [ 772.482116] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.490132] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199632, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.491426] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 772.491745] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 772.492769] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Deleting the datastore file [datastore1] 703bf0c4-9bff-4967-8e84-09969b32b5a1 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 772.493653] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67510205-43c0-4364-878e-4d2e3baff9a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.499409] env[62204]: DEBUG oslo_vmware.api [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199633, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.505747] env[62204]: DEBUG oslo_vmware.api [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for the task: (returnval){ [ 772.505747] env[62204]: value = "task-1199634" [ 772.505747] env[62204]: _type = "Task" [ 772.505747] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.516211] env[62204]: DEBUG oslo_vmware.api [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199634, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.616425] env[62204]: DEBUG nova.network.neutron [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 772.791338] env[62204]: DEBUG nova.network.neutron [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Updating instance_info_cache with network_info: [{"id": "e4c2acaa-505a-49ac-bc57-647785259723", "address": "fa:16:3e:cc:6c:68", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c2acaa-50", "ovs_interfaceid": "e4c2acaa-505a-49ac-bc57-647785259723", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.928247] env[62204]: DEBUG nova.scheduler.client.report [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 772.995062] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199632, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.995375] env[62204]: DEBUG oslo_vmware.api [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199633, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151567} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.995641] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 772.995839] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 772.996246] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 772.996246] env[62204]: INFO nova.compute.manager [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Took 1.19 seconds to destroy the instance on the hypervisor. [ 772.996540] env[62204]: DEBUG oslo.service.loopingcall [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 772.996823] env[62204]: DEBUG nova.compute.manager [-] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 772.996938] env[62204]: DEBUG nova.network.neutron [-] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 773.017134] env[62204]: DEBUG oslo_vmware.api [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Task: {'id': task-1199634, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154048} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.017428] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 773.017653] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 773.017862] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 773.018066] env[62204]: INFO nova.compute.manager [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Took 1.28 seconds to destroy the instance on the hypervisor. [ 773.018489] env[62204]: DEBUG oslo.service.loopingcall [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 773.018489] env[62204]: DEBUG nova.compute.manager [-] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 773.018598] env[62204]: DEBUG nova.network.neutron [-] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 773.294922] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-137ce499-6602-46b5-b1eb-b03282c2bab3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.294922] env[62204]: DEBUG nova.compute.manager [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Instance network_info: |[{"id": "e4c2acaa-505a-49ac-bc57-647785259723", "address": "fa:16:3e:cc:6c:68", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c2acaa-50", "ovs_interfaceid": "e4c2acaa-505a-49ac-bc57-647785259723", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 773.295255] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:6c:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4c2acaa-505a-49ac-bc57-647785259723', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.303676] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Creating folder: Project (6907df6f17b142c0b4881f15f3b88a9f). Parent ref: group-v259933. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.303926] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db479a11-c3fa-44c4-af18-49cc9aeefb4b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.316532] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Created folder: Project (6907df6f17b142c0b4881f15f3b88a9f) in parent group-v259933. [ 773.316781] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Creating folder: Instances. Parent ref: group-v260006. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.317043] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58d1bfc0-8759-4616-8856-47ef4f6fb228 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.327672] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Created folder: Instances in parent group-v260006. [ 773.327970] env[62204]: DEBUG oslo.service.loopingcall [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 773.328193] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 773.328410] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2230cd2a-9d4e-45cc-981d-b101d3ffea7f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.352097] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.352097] env[62204]: value = "task-1199637" [ 773.352097] env[62204]: _type = "Task" [ 773.352097] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.362843] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199637, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.441762] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.446s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.442191] env[62204]: DEBUG nova.compute.manager [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 773.445024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.181s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.447046] env[62204]: INFO nova.compute.claims [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.468990] env[62204]: DEBUG nova.compute.manager [req-f4959122-9161-49df-8a5c-429130bdab22 req-b4a4731b-0d3e-4b29-a52b-4a7537c89e47 service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Received event network-vif-deleted-fc55488d-fc4c-43c1-9d23-fcd59ee87c5b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.469756] env[62204]: INFO nova.compute.manager [req-f4959122-9161-49df-8a5c-429130bdab22 req-b4a4731b-0d3e-4b29-a52b-4a7537c89e47 service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Neutron deleted interface fc55488d-fc4c-43c1-9d23-fcd59ee87c5b; detaching it from the instance and deleting it from the info cache [ 773.469756] env[62204]: DEBUG nova.network.neutron [req-f4959122-9161-49df-8a5c-429130bdab22 req-b4a4731b-0d3e-4b29-a52b-4a7537c89e47 service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.493774] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199632, 'name': CreateSnapshot_Task, 'duration_secs': 0.53213} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.494203] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 773.495123] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9039a55-2137-4fb0-84e0-6b2237054b02 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.510797] env[62204]: DEBUG nova.compute.manager [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Received event network-changed-e4c2acaa-505a-49ac-bc57-647785259723 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.511130] env[62204]: DEBUG nova.compute.manager [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Refreshing instance network info cache due to event network-changed-e4c2acaa-505a-49ac-bc57-647785259723. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 773.511436] env[62204]: DEBUG oslo_concurrency.lockutils [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] Acquiring lock "refresh_cache-137ce499-6602-46b5-b1eb-b03282c2bab3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.511715] env[62204]: DEBUG oslo_concurrency.lockutils [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] Acquired lock "refresh_cache-137ce499-6602-46b5-b1eb-b03282c2bab3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.511976] env[62204]: DEBUG nova.network.neutron [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Refreshing network info cache for port e4c2acaa-505a-49ac-bc57-647785259723 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 773.769938] env[62204]: DEBUG nova.network.neutron [-] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.862805] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199637, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.940073] env[62204]: DEBUG nova.network.neutron [-] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.951466] env[62204]: DEBUG nova.compute.utils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 773.956208] env[62204]: DEBUG nova.compute.manager [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 773.956418] env[62204]: DEBUG nova.network.neutron [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 773.972514] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45b12e87-2778-4210-884c-212f17048d31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.982577] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3270370-298e-44f4-adb4-0e4d9c28025c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.008806] env[62204]: DEBUG nova.policy [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e320cc6ef7b94387a37030800980c0ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b84d64bf52e4edebc0f43f92c4e2bab', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 774.026099] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 774.028645] env[62204]: DEBUG nova.compute.manager [req-f4959122-9161-49df-8a5c-429130bdab22 req-b4a4731b-0d3e-4b29-a52b-4a7537c89e47 service nova] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Detach interface failed, port_id=fc55488d-fc4c-43c1-9d23-fcd59ee87c5b, reason: Instance 703bf0c4-9bff-4967-8e84-09969b32b5a1 could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 774.029095] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1fffc556-ff01-42f7-9bac-80695472efab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.038241] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 774.038241] env[62204]: value = "task-1199638" [ 774.038241] env[62204]: _type = "Task" [ 774.038241] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.047579] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199638, 'name': CloneVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.275030] env[62204]: INFO nova.compute.manager [-] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Took 1.27 seconds to deallocate network for instance. [ 774.283156] env[62204]: DEBUG nova.network.neutron [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Updated VIF entry in instance network info cache for port e4c2acaa-505a-49ac-bc57-647785259723. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 774.283550] env[62204]: DEBUG nova.network.neutron [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Updating instance_info_cache with network_info: [{"id": "e4c2acaa-505a-49ac-bc57-647785259723", "address": "fa:16:3e:cc:6c:68", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4c2acaa-50", "ovs_interfaceid": "e4c2acaa-505a-49ac-bc57-647785259723", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.369074] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199637, 'name': CreateVM_Task, 'duration_secs': 0.681513} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.369074] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 774.369439] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.369439] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.369940] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 774.370056] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1056f146-8e2d-48c7-ab6f-b86430887607 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.379578] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 774.379578] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5226297d-7940-31d2-d005-d8dcaeb4ea6e" [ 774.379578] env[62204]: _type = "Task" [ 774.379578] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.388714] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5226297d-7940-31d2-d005-d8dcaeb4ea6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.400795] env[62204]: DEBUG nova.network.neutron [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Successfully created port: 5418f42e-f30f-475c-bbfd-e2ca7e8921ef {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 774.443159] env[62204]: INFO nova.compute.manager [-] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Took 1.42 seconds to deallocate network for instance. 
[ 774.459459] env[62204]: DEBUG nova.compute.manager [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 774.549944] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199638, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.778058] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.788169] env[62204]: DEBUG oslo_concurrency.lockutils [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] Releasing lock "refresh_cache-137ce499-6602-46b5-b1eb-b03282c2bab3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.788169] env[62204]: DEBUG nova.compute.manager [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Received event network-vif-deleted-41f6daea-83a0-45cc-a106-3528a62481e1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 774.788169] env[62204]: INFO nova.compute.manager [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Neutron deleted interface 41f6daea-83a0-45cc-a106-3528a62481e1; detaching it from the instance and deleting it from the info cache [ 774.788169] env[62204]: DEBUG nova.network.neutron [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.858038] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec6baed-49c6-405e-a756-5c3654b243d1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.866780] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5066ca37-2c71-4486-ac9d-e8de9a82640b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.901939] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fc6889-507e-4675-aae9-717dd1d415d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.913892] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': 
session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5226297d-7940-31d2-d005-d8dcaeb4ea6e, 'name': SearchDatastore_Task, 'duration_secs': 0.01409} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.917086] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.917362] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.917606] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.917753] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.917934] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.918277] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8022859-e84d-40c9-b460-3b87f1f3bc1e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.922211] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f2536a-db39-4a46-9085-6894d197c4af {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.942593] env[62204]: DEBUG nova.compute.provider_tree [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.948026] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.948026] 
env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 774.948026] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94cca17a-1fa8-4e7f-ae43-65ff67a93412 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.950875] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.954193] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 774.954193] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524d23e4-93be-27f8-4af6-6d5b459b04d6" [ 774.954193] env[62204]: _type = "Task" [ 774.954193] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.971395] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524d23e4-93be-27f8-4af6-6d5b459b04d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.049986] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199638, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.290843] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e09d7fd5-3cde-4cad-a7f0-49fa96359e37 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.301680] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4698a3-3766-4817-a419-f8842ca3563e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.329705] env[62204]: DEBUG nova.compute.manager [req-0eac823e-7291-48ff-9c73-e2cbc8d366d5 req-132b5e98-210b-451f-b1e2-10fc29d38181 service nova] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Detach interface failed, port_id=41f6daea-83a0-45cc-a106-3528a62481e1, reason: Instance ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702 could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 775.448268] env[62204]: DEBUG nova.scheduler.client.report [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 775.467227] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524d23e4-93be-27f8-4af6-6d5b459b04d6, 'name': SearchDatastore_Task, 'duration_secs': 0.012384} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.468809] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-875f7145-0036-492b-9ec9-449c1ba201a7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.473798] env[62204]: DEBUG nova.compute.manager [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 775.477698] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 775.477698] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5294e5a0-6401-046f-d517-505c847cd9ac" [ 775.477698] env[62204]: _type = "Task" [ 775.477698] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.486937] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5294e5a0-6401-046f-d517-505c847cd9ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.501301] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 775.501549] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 775.501747] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 775.501999] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 775.502177] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 775.502326] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 775.502533] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 775.502687] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 775.502847] env[62204]: DEBUG 
nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 775.503010] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 775.503185] env[62204]: DEBUG nova.virt.hardware [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 775.504120] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e5e781-7f55-4190-b8d4-e7e2774cd774 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.513015] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f656b49-d017-4bab-b52d-d12c743a1d0e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.549369] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199638, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.888931] env[62204]: DEBUG nova.compute.manager [req-15d13652-d49b-4708-8b9d-b542211f4d82 req-88ee4fbd-371a-4ef6-9639-832fcdf2f22f service nova] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Received event network-vif-plugged-5418f42e-f30f-475c-bbfd-e2ca7e8921ef {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 775.889311] env[62204]: DEBUG oslo_concurrency.lockutils [req-15d13652-d49b-4708-8b9d-b542211f4d82 req-88ee4fbd-371a-4ef6-9639-832fcdf2f22f service nova] Acquiring lock "d6370e37-6f73-4334-8057-a30aa2c39682-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.889409] env[62204]: DEBUG oslo_concurrency.lockutils [req-15d13652-d49b-4708-8b9d-b542211f4d82 req-88ee4fbd-371a-4ef6-9639-832fcdf2f22f service nova] Lock "d6370e37-6f73-4334-8057-a30aa2c39682-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.890255] env[62204]: DEBUG oslo_concurrency.lockutils [req-15d13652-d49b-4708-8b9d-b542211f4d82 req-88ee4fbd-371a-4ef6-9639-832fcdf2f22f service nova] Lock "d6370e37-6f73-4334-8057-a30aa2c39682-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.890255] env[62204]: DEBUG nova.compute.manager [req-15d13652-d49b-4708-8b9d-b542211f4d82 req-88ee4fbd-371a-4ef6-9639-832fcdf2f22f service nova] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] No waiting events found dispatching network-vif-plugged-5418f42e-f30f-475c-bbfd-e2ca7e8921ef {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 775.890255] env[62204]: WARNING nova.compute.manager [req-15d13652-d49b-4708-8b9d-b542211f4d82 req-88ee4fbd-371a-4ef6-9639-832fcdf2f22f service nova] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Received unexpected event network-vif-plugged-5418f42e-f30f-475c-bbfd-e2ca7e8921ef for instance with vm_state building and task_state spawning. [ 775.953628] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.954414] env[62204]: DEBUG nova.compute.manager [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 775.957664] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.022s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.959447] env[62204]: INFO nova.compute.claims [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.991414] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5294e5a0-6401-046f-d517-505c847cd9ac, 'name': SearchDatastore_Task, 'duration_secs': 0.012662} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.991679] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.991936] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 137ce499-6602-46b5-b1eb-b03282c2bab3/137ce499-6602-46b5-b1eb-b03282c2bab3.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.992211] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d77be5a5-d253-41e7-8924-1088d45fe06f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.004238] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 776.004238] env[62204]: value = "task-1199639" [ 776.004238] env[62204]: _type = "Task" [ 776.004238] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.020344] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199639, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.052319] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199638, 'name': CloneVM_Task, 'duration_secs': 1.531727} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.052634] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Created linked-clone VM from snapshot [ 776.053404] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660dc0df-4a86-4c8c-9018-ebce9a281f4c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.061792] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Uploading image 8831d989-636e-474d-a1bb-9b95868f4ba9 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 776.094721] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 776.094721] env[62204]: value = "vm-260009" [ 776.094721] env[62204]: _type = "VirtualMachine" [ 776.094721] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 776.095000] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c12520d4-3b03-4e0b-9318-feb8f18ff126 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.104690] env[62204]: DEBUG nova.network.neutron [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Successfully updated port: 5418f42e-f30f-475c-bbfd-e2ca7e8921ef {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 776.111023] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease: (returnval){ [ 776.111023] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525cde71-adee-4395-57a6-4369cb715509" [ 776.111023] env[62204]: _type = "HttpNfcLease" [ 776.111023] env[62204]: } obtained for exporting VM: (result){ [ 776.111023] env[62204]: value = "vm-260009" [ 776.111023] env[62204]: _type = "VirtualMachine" [ 776.111023] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 776.111023] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the lease: (returnval){ [ 776.111023] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525cde71-adee-4395-57a6-4369cb715509" [ 776.111023] env[62204]: _type = "HttpNfcLease" [ 776.111023] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 776.117263] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 776.117263] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525cde71-adee-4395-57a6-4369cb715509" [ 776.117263] env[62204]: _type = "HttpNfcLease" [ 776.117263] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 776.468121] env[62204]: DEBUG nova.compute.utils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 776.472169] env[62204]: DEBUG nova.compute.manager [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 776.472169] env[62204]: DEBUG nova.network.neutron [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 776.521257] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199639, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.522874] env[62204]: DEBUG nova.policy [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '482111f8541e40cb91ab69a363845043', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f1dbef99d9946d58fbe59f2850f6c63', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 776.609944] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquiring lock "refresh_cache-d6370e37-6f73-4334-8057-a30aa2c39682" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 776.610132] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquired lock "refresh_cache-d6370e37-6f73-4334-8057-a30aa2c39682" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.610326] env[62204]: DEBUG nova.network.neutron [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 776.620495] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 776.620495] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525cde71-adee-4395-57a6-4369cb715509" [ 776.620495] env[62204]: _type = "HttpNfcLease" [ 776.620495] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 776.620788] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 776.620788] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525cde71-adee-4395-57a6-4369cb715509" [ 776.620788] env[62204]: _type = "HttpNfcLease" [ 776.620788] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 776.622925] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28659d03-bddd-4566-b57a-3820432f12b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.634017] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f570-5572-e4fc-e21d-e8a520d81efd/disk-0.vmdk from lease info. 
{{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 776.634200] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f570-5572-e4fc-e21d-e8a520d81efd/disk-0.vmdk for reading. {{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 776.728756] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7245e933-5a2c-46a7-9282-4fb006b08973 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.807910] env[62204]: DEBUG nova.network.neutron [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Successfully created port: cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.972873] env[62204]: DEBUG nova.compute.manager [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 777.018694] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199639, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53899} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.019822] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 137ce499-6602-46b5-b1eb-b03282c2bab3/137ce499-6602-46b5-b1eb-b03282c2bab3.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 777.020206] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 777.023315] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ba2d97a-695b-485f-9bfd-fe6335b4481e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.031403] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 777.031403] env[62204]: value = "task-1199641" [ 777.031403] env[62204]: _type = "Task" [ 777.031403] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.043185] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199641, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.161647] env[62204]: DEBUG nova.network.neutron [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 777.327848] env[62204]: DEBUG nova.network.neutron [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Updating instance_info_cache with network_info: [{"id": "5418f42e-f30f-475c-bbfd-e2ca7e8921ef", "address": "fa:16:3e:13:0d:32", "network": {"id": "2ee0d9a3-0ac6-4bbd-894a-01c1b66e19c3", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1765972549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b84d64bf52e4edebc0f43f92c4e2bab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5418f42e-f3", "ovs_interfaceid": "5418f42e-f30f-475c-bbfd-e2ca7e8921ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.491297] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6819d2a-8b0d-4137-b3d4-5fa3b2e80b56 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.502602] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f187e60-befe-4a14-b233-449c9235f1f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.538850] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d6a3f0-321c-47c3-933f-a15582109ba6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.550287] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f184db5-55a1-41e1-9646-c58f8a787c7e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.554387] env[62204]: DEBUG oslo_vmware.api 
[None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199641, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07349} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.555425] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.556596] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a434b4-968b-40ac-ba74-2a7e62a65a3b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.568552] env[62204]: DEBUG nova.compute.provider_tree [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.589365] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 137ce499-6602-46b5-b1eb-b03282c2bab3/137ce499-6602-46b5-b1eb-b03282c2bab3.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.590057] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6616acd3-6f16-45c0-9deb-97cb8c99d282 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.616276] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 777.616276] env[62204]: value = "task-1199642" [ 777.616276] env[62204]: _type = "Task" [ 777.616276] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.629530] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199642, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.834334] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Releasing lock "refresh_cache-d6370e37-6f73-4334-8057-a30aa2c39682" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 777.835073] env[62204]: DEBUG nova.compute.manager [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Instance network_info: |[{"id": "5418f42e-f30f-475c-bbfd-e2ca7e8921ef", "address": "fa:16:3e:13:0d:32", "network": {"id": "2ee0d9a3-0ac6-4bbd-894a-01c1b66e19c3", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1765972549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b84d64bf52e4edebc0f43f92c4e2bab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5418f42e-f3", "ovs_interfaceid": "5418f42e-f30f-475c-bbfd-e2ca7e8921ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 777.835493] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:0d:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16e15a36-a55b-4c27-b864-f284339009d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5418f42e-f30f-475c-bbfd-e2ca7e8921ef', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 777.843163] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Creating folder: Project (1b84d64bf52e4edebc0f43f92c4e2bab). Parent ref: group-v259933. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.843655] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f186cf9-93df-4b23-abfb-a26f971409a5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.858984] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Created folder: Project (1b84d64bf52e4edebc0f43f92c4e2bab) in parent group-v259933. [ 777.859369] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Creating folder: Instances. Parent ref: group-v260010. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 777.859679] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d85ac607-fa66-4eac-bb1d-d9e1236db48b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.874074] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Created folder: Instances in parent group-v260010. [ 777.874560] env[62204]: DEBUG oslo.service.loopingcall [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 777.874899] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 777.875253] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72230514-243e-4683-98d0-8af7cb79b16c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.899548] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 777.899548] env[62204]: value = "task-1199645" [ 777.899548] env[62204]: _type = "Task" [ 777.899548] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.909279] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199645, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.930066] env[62204]: DEBUG nova.compute.manager [req-13ad9d25-544c-48f5-8062-9cd5839f9664 req-4677bf8e-f2da-462f-ac7b-bfe527eacef5 service nova] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Received event network-changed-5418f42e-f30f-475c-bbfd-e2ca7e8921ef {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 777.930066] env[62204]: DEBUG nova.compute.manager [req-13ad9d25-544c-48f5-8062-9cd5839f9664 req-4677bf8e-f2da-462f-ac7b-bfe527eacef5 service nova] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Refreshing instance network info cache due to event network-changed-5418f42e-f30f-475c-bbfd-e2ca7e8921ef. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 777.930066] env[62204]: DEBUG oslo_concurrency.lockutils [req-13ad9d25-544c-48f5-8062-9cd5839f9664 req-4677bf8e-f2da-462f-ac7b-bfe527eacef5 service nova] Acquiring lock "refresh_cache-d6370e37-6f73-4334-8057-a30aa2c39682" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.930486] env[62204]: DEBUG oslo_concurrency.lockutils [req-13ad9d25-544c-48f5-8062-9cd5839f9664 req-4677bf8e-f2da-462f-ac7b-bfe527eacef5 service nova] Acquired lock "refresh_cache-d6370e37-6f73-4334-8057-a30aa2c39682" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.930486] env[62204]: DEBUG nova.network.neutron [req-13ad9d25-544c-48f5-8062-9cd5839f9664 req-4677bf8e-f2da-462f-ac7b-bfe527eacef5 service nova] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Refreshing network info cache for port 5418f42e-f30f-475c-bbfd-e2ca7e8921ef {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 777.999078] env[62204]: DEBUG nova.compute.manager [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 778.071397] env[62204]: DEBUG nova.scheduler.client.report [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 778.128064] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199642, 'name': ReconfigVM_Task, 'duration_secs': 0.341852} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.128610] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 137ce499-6602-46b5-b1eb-b03282c2bab3/137ce499-6602-46b5-b1eb-b03282c2bab3.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.129360] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29007445-a1bc-4507-909e-31306e2408ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.137017] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 778.137017] env[62204]: value = "task-1199646" [ 778.137017] env[62204]: _type = "Task" [ 778.137017] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.146367] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199646, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.411720] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199645, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.501919] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 778.502221] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 778.502481] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 778.502795] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 778.502923] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 778.503198] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 778.503512] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 778.503694] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 778.503873] env[62204]: DEBUG 
nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 778.504050] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 778.504237] env[62204]: DEBUG nova.virt.hardware [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 778.505152] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf3cf36-4649-4553-abf7-5d5f5cdbf858 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.514327] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e92a2c-58ed-4e1a-b92e-3ae2d7a9077c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.577464] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.620s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.578075] env[62204]: DEBUG nova.compute.manager [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 778.580720] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.138s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.635172] env[62204]: DEBUG nova.network.neutron [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Successfully updated port: cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 778.655343] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199646, 'name': Rename_Task, 'duration_secs': 0.17209} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.655665] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.656051] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4155492-30ee-4002-b335-8e3a46883949 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.667598] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 778.667598] env[62204]: value = "task-1199647" [ 778.667598] env[62204]: _type = "Task" [ 778.667598] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.681029] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199647, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.694341] env[62204]: DEBUG nova.network.neutron [req-13ad9d25-544c-48f5-8062-9cd5839f9664 req-4677bf8e-f2da-462f-ac7b-bfe527eacef5 service nova] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Updated VIF entry in instance network info cache for port 5418f42e-f30f-475c-bbfd-e2ca7e8921ef. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 778.694728] env[62204]: DEBUG nova.network.neutron [req-13ad9d25-544c-48f5-8062-9cd5839f9664 req-4677bf8e-f2da-462f-ac7b-bfe527eacef5 service nova] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Updating instance_info_cache with network_info: [{"id": "5418f42e-f30f-475c-bbfd-e2ca7e8921ef", "address": "fa:16:3e:13:0d:32", "network": {"id": "2ee0d9a3-0ac6-4bbd-894a-01c1b66e19c3", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1765972549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b84d64bf52e4edebc0f43f92c4e2bab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5418f42e-f3", "ovs_interfaceid": "5418f42e-f30f-475c-bbfd-e2ca7e8921ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.913031] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199645, 'name': CreateVM_Task, 'duration_secs': 0.516629} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.914669] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 778.917869] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.918224] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.919032] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 778.919448] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d398d54b-487b-4c0e-b8ee-bbc5553661d3 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.927013] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 778.927013] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529f0761-b67e-53e2-34b9-4feb8bbccb94" [ 778.927013] env[62204]: _type = "Task" [ 778.927013] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.938602] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529f0761-b67e-53e2-34b9-4feb8bbccb94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.084730] env[62204]: DEBUG nova.compute.utils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 779.096258] env[62204]: DEBUG nova.compute.manager [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 779.096460] env[62204]: DEBUG nova.network.neutron [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 779.138060] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.138226] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.138336] env[62204]: DEBUG nova.network.neutron [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 779.152407] env[62204]: DEBUG nova.policy [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'd103506c3798439896357b04736a43b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6054de8928a649f289eeb55fc544a8ee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 779.178578] env[62204]: DEBUG oslo_vmware.api [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199647, 'name': PowerOnVM_Task, 'duration_secs': 0.507282} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.178904] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 779.179137] env[62204]: INFO nova.compute.manager [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Took 8.33 seconds to spawn the instance on the hypervisor. [ 779.179317] env[62204]: DEBUG nova.compute.manager [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 779.180125] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00e776b-a934-4aec-89c4-118fec747042 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.199405] env[62204]: DEBUG oslo_concurrency.lockutils [req-13ad9d25-544c-48f5-8062-9cd5839f9664 req-4677bf8e-f2da-462f-ac7b-bfe527eacef5 service nova] Releasing lock "refresh_cache-d6370e37-6f73-4334-8057-a30aa2c39682" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.425464] env[62204]: DEBUG nova.network.neutron [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Successfully created port: 3c41dfa3-6f25-4539-9cbc-c84270886db1 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.438796] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529f0761-b67e-53e2-34b9-4feb8bbccb94, 'name': SearchDatastore_Task, 'duration_secs': 0.016232} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.439188] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.439510] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 779.439802] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.439963] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.440197] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 779.440588] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9ea5e57-7004-454f-86be-cd19fed781a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.451654] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 779.451654] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 779.452302] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a18a6fba-8355-4dab-88f3-4b498d1a0a19 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.459334] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 779.459334] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52533ea3-93a2-4558-ce8f-d3c55392ad8d" [ 779.459334] env[62204]: _type = "Task" [ 779.459334] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.468243] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52533ea3-93a2-4558-ce8f-d3c55392ad8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.599836] env[62204]: DEBUG nova.compute.manager [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 779.627895] env[62204]: WARNING nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 432115aa-8999-40fe-a0cb-31433575c912 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 779.628080] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 0a4a432d-a71a-4da7-be90-25dcec5a64c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.628906] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance a71fd192-f3b6-4f0f-900d-887d15f44d7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.628906] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 69604167-6a61-4723-bf7d-7ba168837839 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.628906] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 51c9e353-f2cf-41b4-b37e-1cfd5dca0518 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.628906] env[62204]: WARNING nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 779.628906] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 2727dc46-98ed-435d-89ef-41bc20cda776 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.628906] env[62204]: WARNING nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 55d1649c-5eff-4264-bce1-dd907f9531f2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 779.628906] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.629375] env[62204]: WARNING nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 703bf0c4-9bff-4967-8e84-09969b32b5a1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 779.629375] env[62204]: WARNING nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 779.629375] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 137ce499-6602-46b5-b1eb-b03282c2bab3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.629375] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance d6370e37-6f73-4334-8057-a30aa2c39682 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.629502] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance c0990e53-70c9-4536-b26a-bc00bd457c56 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.629552] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance cce823b9-6a03-4902-9794-2b93f99eef94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 779.696934] env[62204]: DEBUG nova.network.neutron [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 779.702346] env[62204]: INFO nova.compute.manager [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Took 40.57 seconds to build instance. [ 779.970408] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52533ea3-93a2-4558-ce8f-d3c55392ad8d, 'name': SearchDatastore_Task, 'duration_secs': 0.014958} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.974027] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5caf281c-24a0-4a4e-b40e-bdf87ca0e867 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.981316] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 779.981316] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521ca7fa-15f7-b5a7-78b3-3eabe0a468b9" [ 779.981316] env[62204]: _type = "Task" [ 779.981316] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.992943] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521ca7fa-15f7-b5a7-78b3-3eabe0a468b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.015860] env[62204]: DEBUG nova.network.neutron [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating instance_info_cache with network_info: [{"id": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "address": "fa:16:3e:38:96:d6", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb48dbbb-64", "ovs_interfaceid": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.093853] env[62204]: DEBUG nova.compute.manager [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received event network-vif-plugged-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 780.094162] env[62204]: DEBUG oslo_concurrency.lockutils [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] Acquiring lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.094318] env[62204]: DEBUG oslo_concurrency.lockutils [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.094489] env[62204]: DEBUG oslo_concurrency.lockutils [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.094662] env[62204]: DEBUG nova.compute.manager [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] No waiting events found dispatching network-vif-plugged-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 780.094837] env[62204]: WARNING nova.compute.manager [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received unexpected event network-vif-plugged-cb48dbbb-646f-445c-89d1-8c4a9e36de59 for instance with vm_state building and task_state spawning. [ 780.095013] env[62204]: DEBUG nova.compute.manager [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received event network-changed-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 780.095519] env[62204]: DEBUG nova.compute.manager [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Refreshing instance network info cache due to event network-changed-cb48dbbb-646f-445c-89d1-8c4a9e36de59. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 780.096176] env[62204]: DEBUG oslo_concurrency.lockutils [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] Acquiring lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.134100] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance dba1edda-edfd-4a97-ab95-48f3f5a933f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 780.167657] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "137ce499-6602-46b5-b1eb-b03282c2bab3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.208515] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31baf715-7c3a-4d42-8cc2-4ba8df9a8510 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "137ce499-6602-46b5-b1eb-b03282c2bab3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.558s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.212610] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "137ce499-6602-46b5-b1eb-b03282c2bab3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.044s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.213175] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "137ce499-6602-46b5-b1eb-b03282c2bab3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.213497] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "137ce499-6602-46b5-b1eb-b03282c2bab3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.213879] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "137ce499-6602-46b5-b1eb-b03282c2bab3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.216948] env[62204]: INFO nova.compute.manager [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Terminating instance [ 780.221818] env[62204]: DEBUG nova.compute.manager [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 780.221818] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 780.221818] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9edaafe-c2eb-471e-a7a2-9d78e6f7d585 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.230831] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 780.231854] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-181d3cfe-41cc-4df0-a34c-19cc4eabb657 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.242862] env[62204]: DEBUG oslo_vmware.api [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 780.242862] env[62204]: value = "task-1199648" [ 780.242862] env[62204]: _type = "Task" [ 780.242862] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.255427] env[62204]: DEBUG oslo_vmware.api [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199648, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.494178] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521ca7fa-15f7-b5a7-78b3-3eabe0a468b9, 'name': SearchDatastore_Task, 'duration_secs': 0.02084} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.494506] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.494781] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d6370e37-6f73-4334-8057-a30aa2c39682/d6370e37-6f73-4334-8057-a30aa2c39682.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 780.495082] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-35f5b1d9-03cc-416f-b098-256784eb5324 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.507738] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 780.507738] env[62204]: value = "task-1199649" [ 780.507738] env[62204]: _type = "Task" [ 780.507738] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.524452] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.524452] env[62204]: DEBUG nova.compute.manager [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Instance network_info: |[{"id": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "address": "fa:16:3e:38:96:d6", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb48dbbb-64", "ovs_interfaceid": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 780.524452] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199649, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.524452] env[62204]: DEBUG oslo_concurrency.lockutils [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] Acquired lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.524452] env[62204]: DEBUG nova.network.neutron [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Refreshing network info cache for port cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 780.524452] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:96:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '255460d5-71d4-4bfd-87f1-acc10085db7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb48dbbb-646f-445c-89d1-8c4a9e36de59', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 780.533674] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Creating folder: Project (7f1dbef99d9946d58fbe59f2850f6c63). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 780.535916] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de908806-730a-4400-a4f1-e4abfd751d78 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.558225] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Created folder: Project (7f1dbef99d9946d58fbe59f2850f6c63) in parent group-v259933. [ 780.558225] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Creating folder: Instances. Parent ref: group-v260013. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 780.558225] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-538393d6-b8ac-42ff-9edb-f786a265e70b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.574325] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Created folder: Instances in parent group-v260013. [ 780.576038] env[62204]: DEBUG oslo.service.loopingcall [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 780.576038] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 780.576038] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08e92a15-bbce-4438-95a7-5f7a826eb0d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.600992] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 780.600992] env[62204]: value = "task-1199652" [ 780.600992] env[62204]: _type = "Task" [ 780.600992] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.610920] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199652, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.612090] env[62204]: DEBUG nova.compute.manager [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 780.637030] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 2c393123-87de-460a-965d-43473478a79f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 780.713409] env[62204]: DEBUG nova.compute.manager [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 780.719447] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 780.719776] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 780.719962] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.720248] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 780.720456] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 780.720628] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 780.720849] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 780.721026] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 780.721200] env[62204]: DEBUG 
nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 780.721371] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 780.721706] env[62204]: DEBUG nova.virt.hardware [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 780.723245] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f648754c-488d-49ce-8b93-ec42bb41d910 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.733710] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffef1c7-b527-448b-a39e-de57d41748a9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.760439] env[62204]: DEBUG oslo_vmware.api [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199648, 'name': PowerOffVM_Task, 'duration_secs': 0.203785} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.760747] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 780.760949] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 780.761250] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b825f9db-8b2a-468b-ab6a-b3214ffb3428 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.842905] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 780.843186] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 780.843441] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleting the datastore file [datastore2] 137ce499-6602-46b5-b1eb-b03282c2bab3 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 780.843818] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2be73648-5837-4080-aa0e-6f26252d7b66 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.855682] env[62204]: DEBUG oslo_vmware.api [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 780.855682] env[62204]: value = "task-1199654" [ 780.855682] env[62204]: _type = "Task" [ 780.855682] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.869481] env[62204]: DEBUG oslo_vmware.api [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199654, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.988741] env[62204]: DEBUG nova.network.neutron [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Successfully updated port: 3c41dfa3-6f25-4539-9cbc-c84270886db1 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 781.021314] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199649, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.044546] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4db54726-495c-48c3-951b-04cf54c9ec56 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.044546] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4db54726-495c-48c3-951b-04cf54c9ec56 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.044767] env[62204]: DEBUG nova.objects.instance [None req-4db54726-495c-48c3-951b-04cf54c9ec56 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'flavor' on Instance uuid a71fd192-f3b6-4f0f-900d-887d15f44d7a {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 781.114881] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199652, 'name': CreateVM_Task, 'duration_secs': 0.439655} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.115220] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 781.116061] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.116250] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.116675] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 781.117081] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6450e0db-0071-437b-b933-ba8217411046 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.129873] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 781.129873] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d32690-aac0-f9f2-cb0d-1d946c405a5d" [ 781.129873] env[62204]: _type = "Task" [ 781.129873] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.143019] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 67ee5c4d-3825-4580-a26e-74eb8da50883 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 781.145604] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d32690-aac0-f9f2-cb0d-1d946c405a5d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.245772] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.288783] env[62204]: DEBUG nova.network.neutron [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updated VIF entry in instance network info cache for port cb48dbbb-646f-445c-89d1-8c4a9e36de59. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 781.289194] env[62204]: DEBUG nova.network.neutron [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating instance_info_cache with network_info: [{"id": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "address": "fa:16:3e:38:96:d6", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb48dbbb-64", "ovs_interfaceid": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.296791] env[62204]: DEBUG nova.compute.manager [req-9e94e52a-df5e-44b8-b294-a96b78108615 req-c0ae261d-8b93-49ac-bdf0-02741681ad99 service nova] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Received event network-vif-plugged-3c41dfa3-6f25-4539-9cbc-c84270886db1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 781.296791] env[62204]: DEBUG oslo_concurrency.lockutils [req-9e94e52a-df5e-44b8-b294-a96b78108615 req-c0ae261d-8b93-49ac-bdf0-02741681ad99 service nova] Acquiring lock "cce823b9-6a03-4902-9794-2b93f99eef94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.297098] env[62204]: DEBUG oslo_concurrency.lockutils [req-9e94e52a-df5e-44b8-b294-a96b78108615 req-c0ae261d-8b93-49ac-bdf0-02741681ad99 service nova] Lock "cce823b9-6a03-4902-9794-2b93f99eef94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.297220] env[62204]: DEBUG oslo_concurrency.lockutils [req-9e94e52a-df5e-44b8-b294-a96b78108615 req-c0ae261d-8b93-49ac-bdf0-02741681ad99 service nova] Lock "cce823b9-6a03-4902-9794-2b93f99eef94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.297398] env[62204]: DEBUG nova.compute.manager [req-9e94e52a-df5e-44b8-b294-a96b78108615 req-c0ae261d-8b93-49ac-bdf0-02741681ad99 service nova] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] No waiting events found dispatching network-vif-plugged-3c41dfa3-6f25-4539-9cbc-c84270886db1 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 781.297604] env[62204]: WARNING nova.compute.manager [req-9e94e52a-df5e-44b8-b294-a96b78108615 req-c0ae261d-8b93-49ac-bdf0-02741681ad99 service nova] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Received unexpected event network-vif-plugged-3c41dfa3-6f25-4539-9cbc-c84270886db1 for instance with vm_state building and task_state spawning. [ 781.367560] env[62204]: DEBUG oslo_vmware.api [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199654, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.355811} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.368402] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.368601] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 781.368781] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 781.368954] env[62204]: INFO nova.compute.manager [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Took 1.15 seconds to destroy the instance on the hypervisor. [ 781.369218] env[62204]: DEBUG oslo.service.loopingcall [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 781.369415] env[62204]: DEBUG nova.compute.manager [-] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 781.369511] env[62204]: DEBUG nova.network.neutron [-] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 781.491823] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "refresh_cache-cce823b9-6a03-4902-9794-2b93f99eef94" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.492111] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquired lock "refresh_cache-cce823b9-6a03-4902-9794-2b93f99eef94" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.492191] env[62204]: DEBUG nova.network.neutron [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 781.524624] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199649, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73466} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.524624] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d6370e37-6f73-4334-8057-a30aa2c39682/d6370e37-6f73-4334-8057-a30aa2c39682.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 781.524624] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 781.524624] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b634146-ce47-4ab4-88f9-f05b58d8e12b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.531563] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 781.531563] env[62204]: value = "task-1199655" [ 781.531563] env[62204]: _type = "Task" [ 781.531563] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.542192] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199655, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.550196] env[62204]: DEBUG nova.objects.instance [None req-4db54726-495c-48c3-951b-04cf54c9ec56 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'pci_requests' on Instance uuid a71fd192-f3b6-4f0f-900d-887d15f44d7a {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 781.642544] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d32690-aac0-f9f2-cb0d-1d946c405a5d, 'name': SearchDatastore_Task, 'duration_secs': 0.071667} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.644871] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.645864] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 781.646310] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.646714] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.647476] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 781.649205] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 1121b1b8-127e-475f-8dfc-de43911de39a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 781.651294] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9792b9a8-4b75-4d98-b48e-0fdea6f00d2d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.665878] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 781.666159] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 781.667327] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5d3dca6-f568-44c7-bc8b-df9792b7a79b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.674401] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 781.674401] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529f4889-c43f-1739-b78c-c2e749cffe53" [ 781.674401] env[62204]: _type = "Task" [ 781.674401] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.682298] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.682529] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.688054] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529f4889-c43f-1739-b78c-c2e749cffe53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.791989] env[62204]: DEBUG oslo_concurrency.lockutils [req-03167c74-2042-488c-ad2e-98887c488328 req-c749ce2a-2400-4bac-a55f-1a249dde7081 service nova] Releasing lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.038856] env[62204]: DEBUG nova.network.neutron [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 782.049055] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149318} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.052222] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 782.052222] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3110fbe9-62fd-431c-903c-6bddb776f822 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.055238] env[62204]: DEBUG nova.objects.base [None req-4db54726-495c-48c3-951b-04cf54c9ec56 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 782.055558] env[62204]: DEBUG nova.network.neutron [None req-4db54726-495c-48c3-951b-04cf54c9ec56 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 782.083478] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] d6370e37-6f73-4334-8057-a30aa2c39682/d6370e37-6f73-4334-8057-a30aa2c39682.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 782.086722] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2d2f681-acc0-43cc-8671-7dcac632be59 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.114534] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 782.114534] env[62204]: value = "task-1199656" [ 782.114534] env[62204]: _type = "Task" [ 782.114534] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.124584] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199656, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.130448] env[62204]: DEBUG nova.network.neutron [-] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.153251] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4db54726-495c-48c3-951b-04cf54c9ec56 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.108s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.155958] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 1a1cb81f-383e-48de-8c11-3d5e2c801f40 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.186115] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529f4889-c43f-1739-b78c-c2e749cffe53, 'name': SearchDatastore_Task, 'duration_secs': 0.029998} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.187084] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d938c4-bfe4-4a88-b71d-a400f41d78b5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.194353] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 782.194353] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5284518b-6710-9659-a6f0-1f17bfde8e9d" [ 782.194353] env[62204]: _type = "Task" [ 782.194353] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.203120] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5284518b-6710-9659-a6f0-1f17bfde8e9d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.242063] env[62204]: DEBUG nova.network.neutron [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Updating instance_info_cache with network_info: [{"id": "3c41dfa3-6f25-4539-9cbc-c84270886db1", "address": "fa:16:3e:74:81:15", "network": {"id": "5511b371-276f-4ada-8042-fe043ac4b297", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-635468146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6054de8928a649f289eeb55fc544a8ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c41dfa3-6f", "ovs_interfaceid": "3c41dfa3-6f25-4539-9cbc-c84270886db1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.625311] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199656, 'name': ReconfigVM_Task, 'duration_secs': 0.47621} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.625311] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Reconfigured VM instance instance-0000003a to attach disk [datastore1] d6370e37-6f73-4334-8057-a30aa2c39682/d6370e37-6f73-4334-8057-a30aa2c39682.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 782.625604] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3873d0ba-8d74-425f-b8ef-1c205b1ceadf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.633494] env[62204]: INFO nova.compute.manager [-] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Took 1.26 seconds to deallocate network for instance. [ 782.633879] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 782.633879] env[62204]: value = "task-1199657" [ 782.633879] env[62204]: _type = "Task" [ 782.633879] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.648751] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199657, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.658783] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.705979] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5284518b-6710-9659-a6f0-1f17bfde8e9d, 'name': SearchDatastore_Task, 'duration_secs': 0.01662} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.706308] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.706622] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56/c0990e53-70c9-4536-b26a-bc00bd457c56.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 782.706919] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02458343-e514-44ee-9bb8-774cc2242cc9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.715875] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 782.715875] env[62204]: value = "task-1199658" [ 782.715875] env[62204]: _type = "Task" [ 782.715875] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.726363] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199658, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.730249] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 782.730497] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.744822] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Releasing lock "refresh_cache-cce823b9-6a03-4902-9794-2b93f99eef94" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.745153] env[62204]: DEBUG nova.compute.manager [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Instance network_info: |[{"id": "3c41dfa3-6f25-4539-9cbc-c84270886db1", "address": "fa:16:3e:74:81:15", "network": {"id": "5511b371-276f-4ada-8042-fe043ac4b297", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-635468146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6054de8928a649f289eeb55fc544a8ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c41dfa3-6f", "ovs_interfaceid": "3c41dfa3-6f25-4539-9cbc-c84270886db1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 782.745890] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:81:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c41dfa3-6f25-4539-9cbc-c84270886db1', 'vif_model': 'vmxnet3'}] 
{{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 782.753413] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Creating folder: Project (6054de8928a649f289eeb55fc544a8ee). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 782.754201] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81f6e0e3-93e3-485e-bbf1-1666a7c1d483 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.767849] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Created folder: Project (6054de8928a649f289eeb55fc544a8ee) in parent group-v259933. [ 782.768124] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Creating folder: Instances. Parent ref: group-v260016. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 782.768451] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fda9c5b-4675-438c-9439-fb4311a3f623 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.780055] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Created folder: Instances in parent group-v260016. [ 782.780204] env[62204]: DEBUG oslo.service.loopingcall [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 782.780367] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 782.780585] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75049acf-bb41-45ac-a04f-0edbc42efa66 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.800487] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 782.800487] env[62204]: value = "task-1199661" [ 782.800487] env[62204]: _type = "Task" [ 782.800487] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.810075] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199661, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.144142] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 783.151393] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199657, 'name': Rename_Task, 'duration_secs': 0.170603} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.152049] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 783.152308] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f876c5a-2f67-433c-a341-1fa84496f3b1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.164967] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 60eaec9c-5dcc-4e2f-9649-78acba318a6b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 783.168097] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 783.168097] env[62204]: value = "task-1199662" [ 783.168097] env[62204]: _type = "Task" [ 783.168097] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.181734] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199662, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.229829] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199658, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.313923] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199661, 'name': CreateVM_Task, 'duration_secs': 0.43799} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.314388] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 783.315124] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.315298] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.315686] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 783.317043] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e65323b-48ee-4873-9180-632889e36a0a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.323793] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 783.323793] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a01733-3891-7078-b427-ab4345feb6c9" [ 783.323793] env[62204]: _type = "Task" [ 783.323793] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.335868] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a01733-3891-7078-b427-ab4345feb6c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.430923] env[62204]: DEBUG nova.compute.manager [req-10a7d60c-9c7d-48de-b2c8-989ad2ad1a42 req-2703ca1d-da06-4d49-836a-24168cef88cf service nova] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Received event network-changed-3c41dfa3-6f25-4539-9cbc-c84270886db1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 783.431184] env[62204]: DEBUG nova.compute.manager [req-10a7d60c-9c7d-48de-b2c8-989ad2ad1a42 req-2703ca1d-da06-4d49-836a-24168cef88cf service nova] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Refreshing instance network info cache due to event network-changed-3c41dfa3-6f25-4539-9cbc-c84270886db1. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 783.431444] env[62204]: DEBUG oslo_concurrency.lockutils [req-10a7d60c-9c7d-48de-b2c8-989ad2ad1a42 req-2703ca1d-da06-4d49-836a-24168cef88cf service nova] Acquiring lock "refresh_cache-cce823b9-6a03-4902-9794-2b93f99eef94" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.431616] env[62204]: DEBUG oslo_concurrency.lockutils [req-10a7d60c-9c7d-48de-b2c8-989ad2ad1a42 req-2703ca1d-da06-4d49-836a-24168cef88cf service nova] Acquired lock "refresh_cache-cce823b9-6a03-4902-9794-2b93f99eef94" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.431827] env[62204]: DEBUG nova.network.neutron [req-10a7d60c-9c7d-48de-b2c8-989ad2ad1a42 req-2703ca1d-da06-4d49-836a-24168cef88cf service nova] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Refreshing network info cache for port 3c41dfa3-6f25-4539-9cbc-c84270886db1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 783.670150] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance f5f0c15f-ae0d-4615-93ab-3203a5d7e090 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 783.684503] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199662, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.728547] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199658, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.777698} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.728838] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56/c0990e53-70c9-4536-b26a-bc00bd457c56.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 783.729079] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 783.729354] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3299f4b-42c1-4987-9015-cb3e0374bccd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.738512] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 783.738512] env[62204]: value = "task-1199663" [ 783.738512] env[62204]: _type = "Task" [ 783.738512] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.753256] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199663, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.835106] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a01733-3891-7078-b427-ab4345feb6c9, 'name': SearchDatastore_Task, 'duration_secs': 0.058633} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.835452] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.835775] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 783.835987] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.836156] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.836331] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 783.836612] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42a405d0-816d-4899-814d-267ad71245cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.846832] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 783.847987] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 783.847987] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43f9d9ad-a5b4-4dca-9550-288893d09dda {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.854269] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 783.854269] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c8f8ca-dd34-40de-2591-e1fb3cbf4df5" [ 783.854269] env[62204]: _type = "Task" [ 783.854269] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.864105] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c8f8ca-dd34-40de-2591-e1fb3cbf4df5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.129538] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.130930] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 784.130930] env[62204]: DEBUG nova.objects.instance [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'flavor' on Instance uuid a71fd192-f3b6-4f0f-900d-887d15f44d7a {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.169570] env[62204]: DEBUG nova.network.neutron [req-10a7d60c-9c7d-48de-b2c8-989ad2ad1a42 req-2703ca1d-da06-4d49-836a-24168cef88cf service nova] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Updated VIF entry in instance network info cache for port 3c41dfa3-6f25-4539-9cbc-c84270886db1. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 784.169920] env[62204]: DEBUG nova.network.neutron [req-10a7d60c-9c7d-48de-b2c8-989ad2ad1a42 req-2703ca1d-da06-4d49-836a-24168cef88cf service nova] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Updating instance_info_cache with network_info: [{"id": "3c41dfa3-6f25-4539-9cbc-c84270886db1", "address": "fa:16:3e:74:81:15", "network": {"id": "5511b371-276f-4ada-8042-fe043ac4b297", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-635468146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6054de8928a649f289eeb55fc544a8ee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2f7abe5d-b8fe-4983-bd50-e7469f1fe7f3", "external-id": "nsx-vlan-transportzone-263", "segmentation_id": 263, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c41dfa3-6f", "ovs_interfaceid": "3c41dfa3-6f25-4539-9cbc-c84270886db1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.179113] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 6dc170a4-b08e-44b5-a152-832670e6866b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 784.193037] env[62204]: DEBUG oslo_vmware.api [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199662, 'name': PowerOnVM_Task, 'duration_secs': 0.760425} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.193478] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 784.193828] env[62204]: INFO nova.compute.manager [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Took 8.72 seconds to spawn the instance on the hypervisor. 
[ 784.194147] env[62204]: DEBUG nova.compute.manager [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 784.195544] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d32ff6-d75a-4cc7-a044-92eaf6228231 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.250869] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199663, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083348} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.251535] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 784.252486] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17bc92c-705f-4b93-a683-d868c1b63b2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.277873] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56/c0990e53-70c9-4536-b26a-bc00bd457c56.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 784.278229] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a0c8707-f9cd-4301-909b-8e775994a18c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.299788] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 784.299788] env[62204]: value = "task-1199664" [ 784.299788] env[62204]: _type = "Task" [ 784.299788] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.308810] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199664, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.364417] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c8f8ca-dd34-40de-2591-e1fb3cbf4df5, 'name': SearchDatastore_Task, 'duration_secs': 0.019539} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.365228] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-138d8655-86e3-4e6b-be0e-7b260c3eed50 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.371722] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 784.371722] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52262b82-b0f3-aaf9-9054-b053b5e71860" [ 784.371722] env[62204]: _type = "Task" [ 784.371722] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.380323] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52262b82-b0f3-aaf9-9054-b053b5e71860, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.673377] env[62204]: DEBUG oslo_concurrency.lockutils [req-10a7d60c-9c7d-48de-b2c8-989ad2ad1a42 req-2703ca1d-da06-4d49-836a-24168cef88cf service nova] Releasing lock "refresh_cache-cce823b9-6a03-4902-9794-2b93f99eef94" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.673429] env[62204]: DEBUG nova.compute.manager [req-10a7d60c-9c7d-48de-b2c8-989ad2ad1a42 req-2703ca1d-da06-4d49-836a-24168cef88cf service nova] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Received event network-vif-deleted-e4c2acaa-505a-49ac-bc57-647785259723 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 784.683597] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 25563dec-7e4d-42d9-b922-0b2354b5d70e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 784.713604] env[62204]: DEBUG nova.objects.instance [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'pci_requests' on Instance uuid a71fd192-f3b6-4f0f-900d-887d15f44d7a {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.717330] env[62204]: INFO nova.compute.manager [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Took 40.47 seconds to build instance. [ 784.811836] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199664, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.883874] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52262b82-b0f3-aaf9-9054-b053b5e71860, 'name': SearchDatastore_Task, 'duration_secs': 0.037819} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.884174] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 784.884449] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] cce823b9-6a03-4902-9794-2b93f99eef94/cce823b9-6a03-4902-9794-2b93f99eef94.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 784.884733] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed4fb618-1dba-4ad2-955e-ef4ddd995534 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.895206] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 784.895206] env[62204]: value = "task-1199665" [ 784.895206] env[62204]: _type = "Task" [ 784.895206] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.905970] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199665, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.188042] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 2b728904-19ef-4773-9260-c615da522801 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 785.216158] env[62204]: DEBUG nova.objects.base [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 785.216635] env[62204]: DEBUG nova.network.neutron [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 785.222030] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09af9af6-cd92-44a3-a983-25979986f360 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "d6370e37-6f73-4334-8057-a30aa2c39682" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 134.601s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.265107] env[62204]: DEBUG nova.policy [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '478b22c814424575af79a8af808398a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81dc15a8604e4900845b79c75cc5ef16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 785.314553] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199664, 'name': ReconfigVM_Task, 'duration_secs': 0.75015} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.314553] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Reconfigured VM instance instance-0000003b to attach disk [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56/c0990e53-70c9-4536-b26a-bc00bd457c56.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 785.315493] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45ef2ba1-6cdf-49eb-ba04-6b116ec3e535 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.328234] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 785.328234] env[62204]: value = "task-1199666" [ 785.328234] env[62204]: _type = "Task" [ 785.328234] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.340228] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199666, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.408090] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199665, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.556430] env[62204]: DEBUG nova.network.neutron [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Successfully created port: 33e67759-b1fb-4395-9ed1-bf2102c8d3ee {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.691800] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance a2a37a1b-3ef0-4be7-924c-66c7a1583b68 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 785.692165] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 785.692364] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 785.725260] env[62204]: DEBUG nova.compute.manager [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 785.843039] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199666, 'name': Rename_Task, 'duration_secs': 0.227794} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.845828] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 785.846313] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7f481d1-e759-4123-97be-3703019c6a9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.856444] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 785.856444] env[62204]: value = "task-1199667" [ 785.856444] env[62204]: _type = "Task" [ 785.856444] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.871616] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199667, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.912392] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597827} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.912782] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] cce823b9-6a03-4902-9794-2b93f99eef94/cce823b9-6a03-4902-9794-2b93f99eef94.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 785.913055] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 785.913456] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7cdd832-29ca-479f-9234-38d66b172e56 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.926994] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 785.926994] env[62204]: value = "task-1199668" [ 785.926994] env[62204]: _type = "Task" [ 785.926994] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.937941] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199668, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.126293] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a277143b-49d6-4929-8279-a289c0b21133 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.135184] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5435847-2569-409e-af3c-cc105df7318f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.169401] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c009a42-05ef-43ba-85a3-7056eeeb0982 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.178382] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fb8fb7-f2e2-4b3c-a2bb-80ca8c05f87a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.193857] env[62204]: DEBUG nova.compute.provider_tree [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.245575] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.357750] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquiring lock "d6370e37-6f73-4334-8057-a30aa2c39682" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.358069] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "d6370e37-6f73-4334-8057-a30aa2c39682" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.358335] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquiring lock "d6370e37-6f73-4334-8057-a30aa2c39682-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.358540] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "d6370e37-6f73-4334-8057-a30aa2c39682-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.358784] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "d6370e37-6f73-4334-8057-a30aa2c39682-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.361144] env[62204]: INFO nova.compute.manager [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Terminating instance [ 786.363297] env[62204]: DEBUG nova.compute.manager [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 786.363648] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 786.368287] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fabd7c4-8dd6-408b-a35a-0266f24102ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.377802] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199667, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.380359] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 786.380442] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-367a868d-be57-4096-bef1-b8cb4607b954 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.388941] env[62204]: DEBUG oslo_vmware.api [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 786.388941] env[62204]: value = "task-1199669" [ 786.388941] env[62204]: _type = "Task" [ 786.388941] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.399033] env[62204]: DEBUG oslo_vmware.api [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199669, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.435427] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140772} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.435861] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 786.436708] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72ce4d9-37a2-4e8b-a86f-66f664f80341 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.460664] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] cce823b9-6a03-4902-9794-2b93f99eef94/cce823b9-6a03-4902-9794-2b93f99eef94.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 786.461037] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2802aeb7-f583-4084-9e9e-1e8a2f2aafd6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.483309] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 786.483309] env[62204]: value = "task-1199670" [ 786.483309] env[62204]: _type = "Task" [ 786.483309] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.494798] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199670, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.697303] env[62204]: DEBUG nova.scheduler.client.report [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.874843] env[62204]: DEBUG oslo_vmware.api [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199667, 'name': PowerOnVM_Task, 'duration_secs': 0.680633} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.875304] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 786.875687] env[62204]: INFO nova.compute.manager [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Took 8.88 seconds to spawn the instance on the hypervisor. [ 786.876011] env[62204]: DEBUG nova.compute.manager [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 786.877316] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03ccc83-7f3e-4290-99fb-ba0e9109a3f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.902921] env[62204]: DEBUG oslo_vmware.api [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199669, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.998638] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199670, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.141694] env[62204]: DEBUG nova.network.neutron [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Successfully updated port: 33e67759-b1fb-4395-9ed1-bf2102c8d3ee {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.182641] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f570-5572-e4fc-e21d-e8a520d81efd/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 787.183760] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385c4c31-a3d3-40af-a257-929c4da5605e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.191056] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f570-5572-e4fc-e21d-e8a520d81efd/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 787.191285] env[62204]: ERROR oslo_vmware.rw_handles [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f570-5572-e4fc-e21d-e8a520d81efd/disk-0.vmdk due to incomplete transfer. 
[ 787.191853] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-54a04cf9-902b-497b-8f83-91828afe31dd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.195494] env[62204]: DEBUG nova.compute.manager [req-0e46c03d-7ed3-4996-8108-70a3b77cff0d req-14d7988c-7e98-4fc0-8883-22dd66c33c9b service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-vif-plugged-33e67759-b1fb-4395-9ed1-bf2102c8d3ee {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 787.196301] env[62204]: DEBUG oslo_concurrency.lockutils [req-0e46c03d-7ed3-4996-8108-70a3b77cff0d req-14d7988c-7e98-4fc0-8883-22dd66c33c9b service nova] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.196301] env[62204]: DEBUG oslo_concurrency.lockutils [req-0e46c03d-7ed3-4996-8108-70a3b77cff0d req-14d7988c-7e98-4fc0-8883-22dd66c33c9b service nova] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.196301] env[62204]: DEBUG oslo_concurrency.lockutils [req-0e46c03d-7ed3-4996-8108-70a3b77cff0d req-14d7988c-7e98-4fc0-8883-22dd66c33c9b service nova] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.196494] env[62204]: DEBUG nova.compute.manager [req-0e46c03d-7ed3-4996-8108-70a3b77cff0d req-14d7988c-7e98-4fc0-8883-22dd66c33c9b service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] No waiting events found dispatching network-vif-plugged-33e67759-b1fb-4395-9ed1-bf2102c8d3ee {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 787.196672] env[62204]: WARNING nova.compute.manager [req-0e46c03d-7ed3-4996-8108-70a3b77cff0d req-14d7988c-7e98-4fc0-8883-22dd66c33c9b service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received unexpected event network-vif-plugged-33e67759-b1fb-4395-9ed1-bf2102c8d3ee for instance with vm_state active and task_state None. 
[ 787.202423] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 787.202629] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.622s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.202869] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.723s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.204371] env[62204]: INFO nova.compute.claims [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.206932] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 787.207095] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Cleaning up deleted instances {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 787.210997] env[62204]: DEBUG oslo_vmware.rw_handles [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e3f570-5572-e4fc-e21d-e8a520d81efd/disk-0.vmdk. 
{{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 787.210997] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Uploaded image 8831d989-636e-474d-a1bb-9b95868f4ba9 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 787.212592] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 787.212592] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-86a0cebd-d287-4093-9cfd-b93e56134690 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.220594] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 787.220594] env[62204]: value = "task-1199671" [ 787.220594] env[62204]: _type = "Task" [ 787.220594] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.233263] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199671, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.405223] env[62204]: DEBUG oslo_vmware.api [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199669, 'name': PowerOffVM_Task, 'duration_secs': 0.896764} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.407378] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 787.407570] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 787.408110] env[62204]: INFO nova.compute.manager [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Took 36.16 seconds to build instance. 
[ 787.409041] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d0d29cc-249d-4a08-82bc-65c36e081c3f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.486608] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 787.486867] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 787.487084] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Deleting the datastore file [datastore1] d6370e37-6f73-4334-8057-a30aa2c39682 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 787.490963] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15b2c06e-e857-4678-80b2-ed64ed53387e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.499619] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199670, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.500923] env[62204]: DEBUG oslo_vmware.api [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for the task: (returnval){ [ 787.500923] env[62204]: value = "task-1199673" [ 787.500923] env[62204]: _type = "Task" [ 787.500923] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.508978] env[62204]: DEBUG oslo_vmware.api [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199673, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.645878] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.646374] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.646585] env[62204]: DEBUG nova.network.neutron [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 787.716518] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] There are 11 instances to clean {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 787.716518] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 186a2de8-2b9e-4c84-8502-cb0ed3b43123] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 787.731225] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199671, 'name': Destroy_Task, 'duration_secs': 0.375454} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.731340] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Destroyed the VM [ 787.731666] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 787.731922] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-efd0bea9-2b08-43ea-b80d-738ee4ff56f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.740353] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 787.740353] env[62204]: value = "task-1199674" [ 787.740353] env[62204]: _type = "Task" [ 787.740353] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.748985] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199674, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.912174] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a13a576f-5aa4-44e5-8c92-92910edc5a00 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.425s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.996711] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199670, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.010821] env[62204]: DEBUG oslo_vmware.api [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Task: {'id': task-1199673, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190005} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.011829] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 788.011829] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 788.011829] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 788.011829] env[62204]: INFO nova.compute.manager [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Took 1.65 seconds to destroy the instance on the hypervisor. [ 788.012029] env[62204]: DEBUG oslo.service.loopingcall [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 788.012073] env[62204]: DEBUG nova.compute.manager [-] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 788.012336] env[62204]: DEBUG nova.network.neutron [-] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 788.187176] env[62204]: WARNING nova.network.neutron [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] 3b20dcf5-a239-493a-bd84-9815cabea48a already exists in list: networks containing: ['3b20dcf5-a239-493a-bd84-9815cabea48a']. ignoring it [ 788.217329] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "031cb3ff-4a80-4961-a399-de31fc72e65b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 788.217598] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "031cb3ff-4a80-4961-a399-de31fc72e65b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 788.221605] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 258ec37d-c791-4c43-8725-0f4b4bbf9b5b] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 788.252145] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199674, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.414751] env[62204]: DEBUG nova.compute.manager [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 788.497867] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199670, 'name': ReconfigVM_Task, 'duration_secs': 1.976574} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.500168] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Reconfigured VM instance instance-0000003c to attach disk [datastore1] cce823b9-6a03-4902-9794-2b93f99eef94/cce823b9-6a03-4902-9794-2b93f99eef94.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 788.502957] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf33730b-5028-479e-b429-c0fbe35774d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.510245] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 788.510245] env[62204]: value = "task-1199675" [ 788.510245] env[62204]: _type = "Task" [ 788.510245] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.520352] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199675, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.597520] env[62204]: DEBUG nova.network.neutron [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "address": "fa:16:3e:36:c2:b3", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33e67759-b1", "ovs_interfaceid": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.640330] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969ea1b6-2f08-4c52-8d7a-81dd6f3a81a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.648747] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a5c551-7a82-4986-926e-332d21f5b96f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.683451] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f00cec-3ce5-4a45-b989-8d723117030b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.692117] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ab8a71-278a-48c5-b73e-ad034b02722b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.706458] env[62204]: DEBUG nova.compute.provider_tree [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.725326] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 12656a79-a836-452c-8f94-c8e142c9ec2f] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 788.753630] env[62204]: DEBUG oslo_vmware.api [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199674, 'name': RemoveSnapshot_Task, 'duration_secs': 0.539842} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.754080] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 788.754369] env[62204]: INFO nova.compute.manager [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Took 16.84 seconds to snapshot the instance on the hypervisor. [ 788.799300] env[62204]: DEBUG nova.network.neutron [-] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.944861] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 789.020940] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199675, 'name': Rename_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.101111] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.101862] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.102070] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.102990] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0b959f-820d-4cab-b6f9-f220248b2a8b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.124709] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 789.124709] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 789.124709] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.124709] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 789.125054] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.125054] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 789.125184] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 789.125358] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 789.125573] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 789.125789] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 789.126032] env[62204]: DEBUG nova.virt.hardware [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 789.132744] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfiguring VM to attach interface {{(pid=62204) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 789.133999] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e78dc659-508d-4301-ae00-3408be57ee41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.153072] env[62204]: DEBUG oslo_vmware.api [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 789.153072] env[62204]: value = "task-1199676" [ 789.153072] env[62204]: _type = "Task" [ 789.153072] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.165617] env[62204]: DEBUG oslo_vmware.api [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199676, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.210870] env[62204]: DEBUG nova.scheduler.client.report [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 789.228073] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: b0180c2b-8edf-4d15-8d12-c754b73f6030] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 789.302505] env[62204]: INFO nova.compute.manager [-] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Took 1.29 seconds to deallocate network for instance. 
[ 789.316845] env[62204]: DEBUG nova.compute.manager [None req-6afd7a5c-d9e9-4a37-a145-8542c3f53c7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Found 1 images (rotation: 2) {{(pid=62204) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 789.476089] env[62204]: INFO nova.compute.manager [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Rebuilding instance [ 789.519204] env[62204]: DEBUG nova.compute.manager [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 789.520163] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca7092c-d6a5-46c3-ba5c-6b14b20a54bb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.526036] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199675, 'name': Rename_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.663789] env[62204]: DEBUG oslo_vmware.api [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199676, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.707977] env[62204]: DEBUG nova.compute.manager [req-c5afbbfc-671a-47ce-a574-acf04de57db7 req-91713026-8656-4c3d-b872-3c7e2bc1bdd9 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-changed-33e67759-b1fb-4395-9ed1-bf2102c8d3ee {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 789.708205] env[62204]: DEBUG nova.compute.manager [req-c5afbbfc-671a-47ce-a574-acf04de57db7 req-91713026-8656-4c3d-b872-3c7e2bc1bdd9 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Refreshing instance network info cache due to event network-changed-33e67759-b1fb-4395-9ed1-bf2102c8d3ee. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 789.708425] env[62204]: DEBUG oslo_concurrency.lockutils [req-c5afbbfc-671a-47ce-a574-acf04de57db7 req-91713026-8656-4c3d-b872-3c7e2bc1bdd9 service nova] Acquiring lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 789.708574] env[62204]: DEBUG oslo_concurrency.lockutils [req-c5afbbfc-671a-47ce-a574-acf04de57db7 req-91713026-8656-4c3d-b872-3c7e2bc1bdd9 service nova] Acquired lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 789.708806] env[62204]: DEBUG nova.network.neutron [req-c5afbbfc-671a-47ce-a574-acf04de57db7 req-91713026-8656-4c3d-b872-3c7e2bc1bdd9 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Refreshing network info cache for port 33e67759-b1fb-4395-9ed1-bf2102c8d3ee {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 789.716135] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.716642] env[62204]: DEBUG nova.compute.manager [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 789.719975] env[62204]: DEBUG oslo_concurrency.lockutils [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.500s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.720191] env[62204]: DEBUG oslo_concurrency.lockutils [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.722558] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.537s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.722558] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.724139] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.725541] env[62204]: INFO nova.compute.claims [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 789.731230] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 19326d9f-5f3a-4756-874f-d4d3ce25f8e8] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 789.746806] env[62204]: INFO nova.scheduler.client.report [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Deleted allocations for instance eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948 [ 789.748737] env[62204]: INFO nova.scheduler.client.report [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Deleted allocations for instance 432115aa-8999-40fe-a0cb-31433575c912 [ 789.810829] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 
tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.023639] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199675, 'name': Rename_Task, 'duration_secs': 1.160761} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.023997] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.024311] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07fb0841-293e-4f36-a40c-8487c1b23643 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.032178] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 790.032178] env[62204]: value = "task-1199677" [ 790.032178] env[62204]: _type = "Task" [ 790.032178] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.036704] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.036992] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af7e55ca-9954-4887-9ab1-93388d477995 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.044615] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199677, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.046064] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 790.046064] env[62204]: value = "task-1199678" [ 790.046064] env[62204]: _type = "Task" [ 790.046064] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.055059] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199678, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.166050] env[62204]: DEBUG oslo_vmware.api [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199676, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.231549] env[62204]: DEBUG nova.compute.utils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 790.236428] env[62204]: DEBUG nova.compute.manager [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 790.236592] env[62204]: DEBUG nova.network.neutron [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 790.238685] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 48fe8f43-4ab9-41de-9b81-35b4438585ea] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 790.263888] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a62e0945-aae3-4a98-b901-2840ad9eccbf tempest-InstanceActionsV221TestJSON-1563664978 tempest-InstanceActionsV221TestJSON-1563664978-project-member] Lock "eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.168s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.275309] env[62204]: DEBUG oslo_concurrency.lockutils [None req-73bcd30f-8592-4516-b560-8f0680880820 tempest-ListServerFiltersTestJSON-1265686823 tempest-ListServerFiltersTestJSON-1265686823-project-member] Lock "432115aa-8999-40fe-a0cb-31433575c912" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.317s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.295337] env[62204]: DEBUG nova.policy [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '370d4b8a24b84bf0a626d056c7758863', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb9a24ef26c74781a2ad36e3430ce630', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 790.450182] env[62204]: DEBUG nova.network.neutron [req-c5afbbfc-671a-47ce-a574-acf04de57db7 req-91713026-8656-4c3d-b872-3c7e2bc1bdd9 
service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updated VIF entry in instance network info cache for port 33e67759-b1fb-4395-9ed1-bf2102c8d3ee. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 790.450678] env[62204]: DEBUG nova.network.neutron [req-c5afbbfc-671a-47ce-a574-acf04de57db7 req-91713026-8656-4c3d-b872-3c7e2bc1bdd9 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "address": "fa:16:3e:36:c2:b3", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33e67759-b1", "ovs_interfaceid": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.544040] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199677, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.551077] env[62204]: DEBUG nova.network.neutron [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Successfully created port: 8d18fd6c-f785-481a-bfed-eea83e68e234 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 790.559778] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199678, 'name': PowerOffVM_Task, 'duration_secs': 0.273403} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.560086] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 790.560793] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 790.561083] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f861eea9-975a-4add-be7f-0c2b1768726a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.571187] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 790.571187] env[62204]: value = "task-1199679" [ 790.571187] env[62204]: _type = "Task" [ 790.571187] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.581216] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 790.581453] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Volume detach. 
Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 790.581660] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-259949', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'name': 'volume-e1a1d343-79bf-455c-8446-09fa8e9f2035', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '69604167-6a61-4723-bf7d-7ba168837839', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'serial': 'e1a1d343-79bf-455c-8446-09fa8e9f2035'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 790.582449] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c9c9ab-f418-47c5-8a48-96e414ea5843 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.601818] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb6d8da-4eb0-4071-8bbd-2957fc480da1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.610097] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71fbb23-a763-49d9-9bab-bd32b4b9ba5d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.632698] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f281959-dbfc-4f9c-9ddc-420920404f46 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.656046] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] The volume has not been displaced from its original location: [datastore2] volume-e1a1d343-79bf-455c-8446-09fa8e9f2035/volume-e1a1d343-79bf-455c-8446-09fa8e9f2035.vmdk. No consolidation needed. {{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 790.661879] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Reconfiguring VM instance instance-00000031 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 790.665975] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-369c8c61-7927-4436-8d7f-e37336f31147 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.686128] env[62204]: DEBUG oslo_vmware.api [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199676, 'name': ReconfigVM_Task, 'duration_secs': 1.242287} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.687907] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.688077] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfigured VM to attach interface {{(pid=62204) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 790.691272] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 790.691272] env[62204]: value = "task-1199680" [ 790.691272] env[62204]: _type = "Task" [ 790.691272] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.701331] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199680, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.738019] env[62204]: DEBUG nova.compute.manager [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 790.744712] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: bcb11a72-4394-42a2-9a9f-295adc1abcd0] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 790.955635] env[62204]: DEBUG oslo_concurrency.lockutils [req-c5afbbfc-671a-47ce-a574-acf04de57db7 req-91713026-8656-4c3d-b872-3c7e2bc1bdd9 service nova] Releasing lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 790.955919] env[62204]: DEBUG nova.compute.manager [req-c5afbbfc-671a-47ce-a574-acf04de57db7 req-91713026-8656-4c3d-b872-3c7e2bc1bdd9 service nova] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Received event network-vif-deleted-5418f42e-f30f-475c-bbfd-e2ca7e8921ef {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 791.050362] env[62204]: DEBUG oslo_vmware.api [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199677, 'name': PowerOnVM_Task, 'duration_secs': 0.644316} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.050728] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.050992] env[62204]: INFO nova.compute.manager [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Took 10.44 seconds to spawn the instance on the hypervisor. [ 791.051261] env[62204]: DEBUG nova.compute.manager [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 791.055300] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5544eb-95a9-424e-8bcf-1f28d11aa027 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.139949] env[62204]: DEBUG nova.compute.manager [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 791.142270] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0ce142-c17e-409d-b451-7003ec46ce76 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.145564] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad1ea68-f46b-4a57-bf3d-de04243e644f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.158025] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46c89b4-c20b-4452-a11c-fcbea73d78bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.191219] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316aaf61-e960-438f-b118-1ef1ab79532b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.194824] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f1948707-2f28-4c74-8d03-a3b738c648f3 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.065s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.207803] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95125ebe-a78d-46f1-85eb-590b14dc4084 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.211584] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199680, 'name': ReconfigVM_Task, 'duration_secs': 0.325392} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.211681] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Reconfigured VM instance instance-00000031 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 791.217320] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c56bf2e9-5791-43a4-b212-dba3b3afc8f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.234952] env[62204]: DEBUG nova.compute.provider_tree [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.245446] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 791.245446] env[62204]: value = "task-1199681" [ 791.245446] env[62204]: _type = "Task" [ 791.245446] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.249310] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: ce74983e-8347-425c-967a-6a78a7daa701] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 791.257138] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199681, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.577618] env[62204]: INFO nova.compute.manager [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Took 39.66 seconds to build instance. 
[ 791.657430] env[62204]: INFO nova.compute.manager [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] instance snapshotting [ 791.658058] env[62204]: DEBUG nova.objects.instance [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'flavor' on Instance uuid 0a4a432d-a71a-4da7-be90-25dcec5a64c6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 791.737691] env[62204]: DEBUG nova.scheduler.client.report [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 791.747307] env[62204]: DEBUG nova.compute.manager [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 791.753131] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 7c21539c-35fa-4f58-beb0-e965ffaf79af] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 791.761504] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199681, 'name': ReconfigVM_Task, 'duration_secs': 0.133595} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.761811] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-259949', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'name': 'volume-e1a1d343-79bf-455c-8446-09fa8e9f2035', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '69604167-6a61-4723-bf7d-7ba168837839', 'attached_at': '', 'detached_at': '', 'volume_id': 'e1a1d343-79bf-455c-8446-09fa8e9f2035', 'serial': 'e1a1d343-79bf-455c-8446-09fa8e9f2035'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 791.762097] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 791.764905] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d86f26-f95e-4159-beb0-bded562ee50d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.773388] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 791.775622] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 791.775854] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 791.776044] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 791.776248] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 791.776398] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.776550] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 791.776762] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 791.776922] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 791.777126] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 791.777272] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 791.777445] env[62204]: DEBUG nova.virt.hardware [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 791.777706] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-961cdc61-511e-43d8-8bf9-5930461969b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.781019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922ca742-2899-49fe-8dd6-b62c49842379 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.788666] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7966ebee-e47e-4a55-a931-fb83e0786695 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.863496] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba 
tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 791.865771] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 791.865771] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Deleting the datastore file [datastore2] 69604167-6a61-4723-bf7d-7ba168837839 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 791.865771] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5856085-feeb-43fd-9339-b1e28794eae1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.874041] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for the task: (returnval){ [ 791.874041] env[62204]: value = "task-1199683" [ 791.874041] env[62204]: _type = "Task" [ 791.874041] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.881159] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199683, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.079546] env[62204]: DEBUG oslo_concurrency.lockutils [None req-70bba7f0-c90a-47b1-a82e-96cfc1174519 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "cce823b9-6a03-4902-9794-2b93f99eef94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.745s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.104091] env[62204]: DEBUG nova.network.neutron [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Successfully updated port: 8d18fd6c-f785-481a-bfed-eea83e68e234 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 792.165738] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75e2c4b-1a49-4420-9c06-4797af0de534 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.188071] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb53260c-4b3d-4990-846f-90bbb37c9a44 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.243041] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.243661] env[62204]: DEBUG nova.compute.manager [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 792.247103] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.990s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.248565] env[62204]: INFO nova.compute.claims [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 792.260566] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 6c63cc36-4f25-4196-9e74-50dcbefd37a2] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 792.307873] env[62204]: DEBUG nova.compute.manager [req-6d0a2dc7-e04f-4b6b-86a9-c6c3508414b2 req-a61ca337-4c03-4fc2-87c5-14bcc3eb1922 service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Received event network-vif-plugged-8d18fd6c-f785-481a-bfed-eea83e68e234 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 792.307932] env[62204]: DEBUG oslo_concurrency.lockutils [req-6d0a2dc7-e04f-4b6b-86a9-c6c3508414b2 req-a61ca337-4c03-4fc2-87c5-14bcc3eb1922 service nova] Acquiring lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.308538] env[62204]: DEBUG oslo_concurrency.lockutils [req-6d0a2dc7-e04f-4b6b-86a9-c6c3508414b2 req-a61ca337-4c03-4fc2-87c5-14bcc3eb1922 service nova] Lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.308725] env[62204]: DEBUG oslo_concurrency.lockutils [req-6d0a2dc7-e04f-4b6b-86a9-c6c3508414b2 req-a61ca337-4c03-4fc2-87c5-14bcc3eb1922 service nova] Lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.308902] env[62204]: DEBUG nova.compute.manager [req-6d0a2dc7-e04f-4b6b-86a9-c6c3508414b2 req-a61ca337-4c03-4fc2-87c5-14bcc3eb1922 service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] No waiting events found dispatching network-vif-plugged-8d18fd6c-f785-481a-bfed-eea83e68e234 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 792.310195] env[62204]: WARNING nova.compute.manager [req-6d0a2dc7-e04f-4b6b-86a9-c6c3508414b2 req-a61ca337-4c03-4fc2-87c5-14bcc3eb1922 service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Received unexpected event network-vif-plugged-8d18fd6c-f785-481a-bfed-eea83e68e234 for instance with vm_state building and task_state spawning. 
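The three lockutils entries just above (acquire "dba1edda-…-events", pop, release, then "No waiting events found" and the "Received unexpected event" WARNING) correspond to the compute manager serializing access to its per-instance external-event table: the event handler takes the "<uuid>-events" lock, looks for a registered waiter for network-vif-plugged-8d18fd6c-…, and finds none because the instance is still building. A minimal, simplified sketch of that acquire/pop/release pattern, using oslo_concurrency.lockutils; the INSTANCE_EVENTS dictionary and pop_instance_event() helper below are hypothetical stand-ins, not Nova's actual implementation:

```python
# Simplified illustration (assumed, not Nova's real code) of the
# "acquire <uuid>-events lock -> pop waiter -> release" pattern that
# produces the lockutils DEBUG lines in the log above.
from oslo_concurrency import lockutils

# Hypothetical registry: {instance_uuid: {event_name: waiter}}
INSTANCE_EVENTS = {}

def pop_instance_event(instance_uuid, event_name):
    # Lock name mirrors the "<uuid>-events" locks seen in the log.
    with lockutils.lock(f"{instance_uuid}-events"):
        waiters = INSTANCE_EVENTS.get(instance_uuid, {})
        waiter = waiters.pop(event_name, None)
    if waiter is None:
        # No registered waiter: this is the case where the compute manager
        # logs "No waiting events found ..." followed by the
        # "Received unexpected event ..." WARNING.
        print(f"No waiting events found dispatching {event_name}")
    return waiter
```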
[ 792.385652] env[62204]: DEBUG oslo_vmware.api [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Task: {'id': task-1199683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082509} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.385958] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 792.386132] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 792.386388] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 792.455113] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Volume detach. Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 792.455534] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bed8a3b5-e8d5-43e8-a884-c4f6aef3c471 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.465833] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1650b187-2fbe-4f1f-b745-50c5fe834ebd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.498387] env[62204]: ERROR nova.compute.manager [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Failed to detach volume e1a1d343-79bf-455c-8446-09fa8e9f2035 from /dev/sda: nova.exception.InstanceNotFound: Instance 69604167-6a61-4723-bf7d-7ba168837839 could not be found. 
[ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] Traceback (most recent call last): [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self.driver.rebuild(**kwargs) [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] raise NotImplementedError() [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] NotImplementedError [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] During handling of the above exception, another exception occurred: [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] Traceback (most recent call last): [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self.driver.detach_volume(context, old_connection_info, [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] return self._volumeops.detach_volume(connection_info, instance) [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self._detach_volume_vmdk(connection_info, instance) [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] stable_ref.fetch_moref(session) [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] raise exception.InstanceNotFound(instance_id=self._uuid) [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] nova.exception.InstanceNotFound: 
Instance 69604167-6a61-4723-bf7d-7ba168837839 could not be found. [ 792.498387] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] [ 792.581792] env[62204]: DEBUG nova.compute.manager [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 792.610365] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "refresh_cache-dba1edda-edfd-4a97-ab95-48f3f5a933f8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.610365] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "refresh_cache-dba1edda-edfd-4a97-ab95-48f3f5a933f8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.610365] env[62204]: DEBUG nova.network.neutron [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 792.681765] env[62204]: DEBUG nova.compute.utils [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Build of instance 69604167-6a61-4723-bf7d-7ba168837839 aborted: Failed to rebuild volume backed instance. {{(pid=62204) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 792.689032] env[62204]: ERROR nova.compute.manager [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 69604167-6a61-4723-bf7d-7ba168837839 aborted: Failed to rebuild volume backed instance. 
[ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] Traceback (most recent call last): [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self.driver.rebuild(**kwargs) [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] raise NotImplementedError() [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] NotImplementedError [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] During handling of the above exception, another exception occurred: [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] Traceback (most recent call last): [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 3600, in _rebuild_volume_backed_instance [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self._detach_root_volume(context, instance, root_bdm) [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 3579, in _detach_root_volume [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] with excutils.save_and_reraise_exception(): [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self.force_reraise() [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] raise self.value [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self.driver.detach_volume(context, old_connection_info, [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] return self._volumeops.detach_volume(connection_info, instance) [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self._detach_volume_vmdk(connection_info, instance) [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] stable_ref.fetch_moref(session) [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] raise exception.InstanceNotFound(instance_id=self._uuid) [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] nova.exception.InstanceNotFound: Instance 69604167-6a61-4723-bf7d-7ba168837839 could not be found. [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] During handling of the above exception, another exception occurred: [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] Traceback (most recent call last): [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 10865, in _error_out_instance_on_exception [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] yield [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 3868, in rebuild_instance [ 792.689032] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self._do_rebuild_instance_with_claim( [ 792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 3954, in _do_rebuild_instance_with_claim [ 792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self._do_rebuild_instance( [ 792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 4146, in _do_rebuild_instance [ 792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self._rebuild_default_impl(**kwargs) [ 792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 3723, in _rebuild_default_impl [ 792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] self._rebuild_volume_backed_instance( [ 
792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] File "/opt/stack/nova/nova/compute/manager.py", line 3615, in _rebuild_volume_backed_instance [ 792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] raise exception.BuildAbortException( [ 792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] nova.exception.BuildAbortException: Build of instance 69604167-6a61-4723-bf7d-7ba168837839 aborted: Failed to rebuild volume backed instance. [ 792.690183] env[62204]: ERROR nova.compute.manager [instance: 69604167-6a61-4723-bf7d-7ba168837839] [ 792.703193] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 792.704503] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-eef3b3c1-997c-4447-acd5-b139a96566c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.714750] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 792.714750] env[62204]: value = "task-1199684" [ 792.714750] env[62204]: _type = "Task" [ 792.714750] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.726294] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199684, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.748308] env[62204]: DEBUG nova.compute.utils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 792.749718] env[62204]: DEBUG nova.compute.manager [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 792.749889] env[62204]: DEBUG nova.network.neutron [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 792.761338] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 4644dfab-0758-43e6-bbcc-9930f086a4e5] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 792.853216] env[62204]: DEBUG nova.policy [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a2edea246e74173bbdb4365d0309cd7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be5f3f8b28ab4b63a2621b1fe1383af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 793.108578] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.166403] env[62204]: DEBUG nova.network.neutron [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.226854] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199684, 'name': CreateSnapshot_Task, 'duration_secs': 0.446731} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.227204] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 793.228349] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f18210d-9d08-490e-ab84-22bf8987e9dd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.257135] env[62204]: DEBUG nova.compute.manager [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 793.269430] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.269430] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Cleaning up deleted instances with incomplete migration {{(pid=62204) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 793.388898] env[62204]: DEBUG nova.network.neutron [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Updating instance_info_cache with network_info: [{"id": "8d18fd6c-f785-481a-bfed-eea83e68e234", "address": "fa:16:3e:7f:a9:19", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d18fd6c-f7", "ovs_interfaceid": "8d18fd6c-f785-481a-bfed-eea83e68e234", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.420452] env[62204]: DEBUG nova.network.neutron [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Successfully created port: 
8a6c2acb-ed90-490d-b4c8-9ab8037a80d2 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 793.657462] env[62204]: DEBUG oslo_concurrency.lockutils [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-1eef631c-fa58-4ad5-862e-c25b847eaa2c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.662176] env[62204]: DEBUG oslo_concurrency.lockutils [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-1eef631c-fa58-4ad5-862e-c25b847eaa2c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.003s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.662176] env[62204]: DEBUG nova.objects.instance [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'flavor' on Instance uuid a71fd192-f3b6-4f0f-900d-887d15f44d7a {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 793.749563] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 793.755415] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-21286181-5800-453c-91e5-22aea5faa271 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.764910] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 793.764910] env[62204]: value = "task-1199685" [ 793.764910] env[62204]: _type = "Task" [ 793.764910] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.774871] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94876d65-bf43-48b1-bf67-476186951b97 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.778229] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.784686] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199685, 'name': CloneVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.788129] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f0b09d-160f-4610-8847-3231545093a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.823253] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabb7d35-1211-4460-a7ca-52259e26d9fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.831766] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60b2380-a88a-42c8-bf09-1ec8a9458290 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.847741] env[62204]: DEBUG nova.compute.provider_tree [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.895378] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "refresh_cache-dba1edda-edfd-4a97-ab95-48f3f5a933f8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.895378] env[62204]: DEBUG nova.compute.manager [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Instance network_info: |[{"id": "8d18fd6c-f785-481a-bfed-eea83e68e234", "address": "fa:16:3e:7f:a9:19", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d18fd6c-f7", "ovs_interfaceid": "8d18fd6c-f785-481a-bfed-eea83e68e234", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 793.895378] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:a9:19', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d18fd6c-f785-481a-bfed-eea83e68e234', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 793.903370] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating folder: Project (fb9a24ef26c74781a2ad36e3430ce630). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.903705] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c75f22f8-894a-4a18-bad7-77204e4d608d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.915626] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Created folder: Project (fb9a24ef26c74781a2ad36e3430ce630) in parent group-v259933. [ 793.915791] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating folder: Instances. Parent ref: group-v260021. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 793.916043] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-655541bc-3a05-4c27-83bb-1a0dd474eb34 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.928266] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Created folder: Instances in parent group-v260021. [ 793.928581] env[62204]: DEBUG oslo.service.loopingcall [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 793.928795] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 793.928964] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07b11ae0-7719-47e0-b916-f3c8363959a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.950155] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 793.950155] env[62204]: value = "task-1199688" [ 793.950155] env[62204]: _type = "Task" [ 793.950155] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.956866] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199688, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.270877] env[62204]: DEBUG nova.compute.manager [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 794.291685] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199685, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.302466] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 794.302746] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 794.302937] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 794.303156] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 794.303312] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 794.303501] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 794.303725] 
env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 794.303910] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 794.304129] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 794.304329] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 794.304536] env[62204]: DEBUG nova.virt.hardware [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 794.305365] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3be3b3-37c2-4675-84b2-a8ae5724baf7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.313549] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413ffbf4-8b56-489a-9805-d93277fb24fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.351646] env[62204]: DEBUG nova.scheduler.client.report [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 794.415955] env[62204]: DEBUG nova.objects.instance [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'pci_requests' on Instance uuid a71fd192-f3b6-4f0f-900d-887d15f44d7a {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 794.457977] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199688, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.709304] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.782489] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199685, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.857723] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.858304] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 794.861456] env[62204]: DEBUG oslo_concurrency.lockutils [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.091s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.861675] env[62204]: DEBUG oslo_concurrency.lockutils [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.863765] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.400s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.868019] env[62204]: INFO nova.compute.claims [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 794.886427] env[62204]: DEBUG nova.compute.manager [req-a60b3a1f-74d9-42a1-8ae4-2e9c0b89341c req-bf046d1e-fa74-4030-b8e2-f2b2f9fe5c3e service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Received event 
network-changed-8d18fd6c-f785-481a-bfed-eea83e68e234 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 794.886427] env[62204]: DEBUG nova.compute.manager [req-a60b3a1f-74d9-42a1-8ae4-2e9c0b89341c req-bf046d1e-fa74-4030-b8e2-f2b2f9fe5c3e service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Refreshing instance network info cache due to event network-changed-8d18fd6c-f785-481a-bfed-eea83e68e234. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 794.886427] env[62204]: DEBUG oslo_concurrency.lockutils [req-a60b3a1f-74d9-42a1-8ae4-2e9c0b89341c req-bf046d1e-fa74-4030-b8e2-f2b2f9fe5c3e service nova] Acquiring lock "refresh_cache-dba1edda-edfd-4a97-ab95-48f3f5a933f8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.886427] env[62204]: DEBUG oslo_concurrency.lockutils [req-a60b3a1f-74d9-42a1-8ae4-2e9c0b89341c req-bf046d1e-fa74-4030-b8e2-f2b2f9fe5c3e service nova] Acquired lock "refresh_cache-dba1edda-edfd-4a97-ab95-48f3f5a933f8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.886427] env[62204]: DEBUG nova.network.neutron [req-a60b3a1f-74d9-42a1-8ae4-2e9c0b89341c req-bf046d1e-fa74-4030-b8e2-f2b2f9fe5c3e service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Refreshing network info cache for port 8d18fd6c-f785-481a-bfed-eea83e68e234 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 794.901649] env[62204]: INFO nova.scheduler.client.report [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Deleted allocations for instance 55d1649c-5eff-4264-bce1-dd907f9531f2 [ 794.918468] env[62204]: DEBUG nova.objects.base [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 794.918703] env[62204]: DEBUG nova.network.neutron [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 794.963227] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199688, 'name': CreateVM_Task, 'duration_secs': 0.530176} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.963793] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 794.964524] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 794.965447] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.965447] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 794.965590] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b3c66ed-95dc-4fe2-8489-17e1b70b7c85 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.970951] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 794.970951] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a59b88-75b0-90bf-f99f-75509147794e" [ 794.970951] env[62204]: _type = "Task" [ 794.970951] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.979393] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a59b88-75b0-90bf-f99f-75509147794e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.068311] env[62204]: DEBUG nova.policy [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '478b22c814424575af79a8af808398a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81dc15a8604e4900845b79c75cc5ef16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 795.286698] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199685, 'name': CloneVM_Task, 'duration_secs': 1.221238} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.287028] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Created linked-clone VM from snapshot [ 795.287775] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc7ee12-82fd-48da-b890-029a89b333e8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.297798] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Uploading image 4f9ea6a5-3f0b-47f0-a5c9-c4019eca38dd {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 795.329053] env[62204]: DEBUG oslo_vmware.rw_handles [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 795.329053] env[62204]: value = "vm-260020" [ 795.329053] env[62204]: _type = "VirtualMachine" [ 795.329053] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 795.329402] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-62e6f7ba-de4e-4b27-bfb7-15edb120fc89 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.337573] env[62204]: DEBUG oslo_vmware.rw_handles [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease: (returnval){ [ 795.337573] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521fb6de-f168-5ff1-d522-b568f98d27ea" [ 795.337573] env[62204]: _type = "HttpNfcLease" [ 795.337573] env[62204]: } obtained for exporting VM: (result){ [ 795.337573] env[62204]: value = "vm-260020" [ 795.337573] env[62204]: _type = "VirtualMachine" [ 795.337573] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 795.337573] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the lease: (returnval){ [ 795.337573] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521fb6de-f168-5ff1-d522-b568f98d27ea" [ 795.337573] env[62204]: _type = "HttpNfcLease" [ 795.337573] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 795.347823] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 795.347823] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521fb6de-f168-5ff1-d522-b568f98d27ea" [ 795.347823] env[62204]: _type = "HttpNfcLease" [ 795.347823] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 795.372350] env[62204]: DEBUG nova.compute.utils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 795.380129] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 795.380318] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 795.410709] env[62204]: DEBUG oslo_concurrency.lockutils [None req-933558cf-b669-4e1b-8073-1970311d9489 tempest-ServerAddressesTestJSON-766158484 tempest-ServerAddressesTestJSON-766158484-project-member] Lock "55d1649c-5eff-4264-bce1-dd907f9531f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.730s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.482710] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a59b88-75b0-90bf-f99f-75509147794e, 'name': SearchDatastore_Task, 'duration_secs': 0.012113} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.484948] env[62204]: DEBUG nova.policy [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6169c0354c748aaa350e0d310620eb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '286c8643ef8c4d5ca5482bba5952ed53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 795.486951] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 795.487130] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.487361] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.487517] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.487701] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.488019] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-409ebdf3-b0de-470e-8eda-7e6a25fa4c59 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.497325] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.497468] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.498269] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23545b06-93fe-49c8-a102-a2c76cbbcf34 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.504029] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 795.504029] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5279a297-3c26-5279-5e54-95d625af6ffe" [ 795.504029] env[62204]: _type = "Task" [ 795.504029] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.516586] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5279a297-3c26-5279-5e54-95d625af6ffe, 'name': SearchDatastore_Task, 'duration_secs': 0.008578} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.517369] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8654faee-42e4-406e-ad26-711686ba0b0d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.523081] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 795.523081] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52335a3a-b890-e507-eee9-e405094d385b" [ 795.523081] env[62204]: _type = "Task" [ 795.523081] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.531764] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52335a3a-b890-e507-eee9-e405094d385b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.593846] env[62204]: DEBUG nova.network.neutron [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Successfully updated port: 8a6c2acb-ed90-490d-b4c8-9ab8037a80d2 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 795.775387] env[62204]: DEBUG nova.network.neutron [req-a60b3a1f-74d9-42a1-8ae4-2e9c0b89341c req-bf046d1e-fa74-4030-b8e2-f2b2f9fe5c3e service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Updated VIF entry in instance network info cache for port 8d18fd6c-f785-481a-bfed-eea83e68e234. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 795.775778] env[62204]: DEBUG nova.network.neutron [req-a60b3a1f-74d9-42a1-8ae4-2e9c0b89341c req-bf046d1e-fa74-4030-b8e2-f2b2f9fe5c3e service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Updating instance_info_cache with network_info: [{"id": "8d18fd6c-f785-481a-bfed-eea83e68e234", "address": "fa:16:3e:7f:a9:19", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d18fd6c-f7", "ovs_interfaceid": "8d18fd6c-f785-481a-bfed-eea83e68e234", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.847549] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 795.847549] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521fb6de-f168-5ff1-d522-b568f98d27ea" [ 795.847549] env[62204]: _type = "HttpNfcLease" [ 795.847549] env[62204]: } is ready. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 795.847549] env[62204]: DEBUG oslo_vmware.rw_handles [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 795.847549] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521fb6de-f168-5ff1-d522-b568f98d27ea" [ 795.847549] env[62204]: _type = "HttpNfcLease" [ 795.847549] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 795.848172] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8704fd-fc86-4c6b-a493-bfb196b24729 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.855712] env[62204]: DEBUG oslo_vmware.rw_handles [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52865c0f-98ad-439b-ac49-eab061721cca/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 795.855899] env[62204]: DEBUG oslo_vmware.rw_handles [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52865c0f-98ad-439b-ac49-eab061721cca/disk-0.vmdk for reading. {{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 795.918924] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 795.959065] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c4972511-233c-4382-a9eb-16c32cbb55ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.030015] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Successfully created port: 557947cd-ca92-41e1-a093-6b0f7718f5cd {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.036291] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52335a3a-b890-e507-eee9-e405094d385b, 'name': SearchDatastore_Task, 'duration_secs': 0.010862} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.036511] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.036839] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] dba1edda-edfd-4a97-ab95-48f3f5a933f8/dba1edda-edfd-4a97-ab95-48f3f5a933f8.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.037022] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30650c3f-bb41-4288-8eb0-23606e51cf31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.043865] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 796.043865] env[62204]: value = "task-1199690" [ 796.043865] env[62204]: _type = "Task" [ 796.043865] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.053843] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199690, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.097425] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-2c393123-87de-460a-965d-43473478a79f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 796.097425] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-2c393123-87de-460a-965d-43473478a79f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.097425] env[62204]: DEBUG nova.network.neutron [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 796.145072] env[62204]: DEBUG oslo_concurrency.lockutils [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "2727dc46-98ed-435d-89ef-41bc20cda776" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.147153] env[62204]: DEBUG oslo_concurrency.lockutils [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.002s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.148829] env[62204]: INFO nova.compute.manager [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Shelving [ 796.225015] env[62204]: DEBUG oslo_concurrency.lockutils [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Acquiring lock "69604167-6a61-4723-bf7d-7ba168837839" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.225015] env[62204]: DEBUG oslo_concurrency.lockutils [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "69604167-6a61-4723-bf7d-7ba168837839" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.225015] env[62204]: DEBUG oslo_concurrency.lockutils [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 
tempest-ServerActionsV293TestJSON-1601953463-project-member] Acquiring lock "69604167-6a61-4723-bf7d-7ba168837839-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.225015] env[62204]: DEBUG oslo_concurrency.lockutils [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "69604167-6a61-4723-bf7d-7ba168837839-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.225015] env[62204]: DEBUG oslo_concurrency.lockutils [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "69604167-6a61-4723-bf7d-7ba168837839-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.227230] env[62204]: INFO nova.compute.manager [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Terminating instance [ 796.229507] env[62204]: DEBUG nova.compute.manager [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 796.230565] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae26c77c-9548-422c-97bd-4e6ba9a8d44e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.241647] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809419ea-ae09-45d3-8e74-355b6a95f99d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.275333] env[62204]: WARNING nova.virt.vmwareapi.driver [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 69604167-6a61-4723-bf7d-7ba168837839 could not be found. 
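The paired "Acquiring lock ... acquired ... waited" and "released ... held" entries above are produced by oslo.concurrency's lockutils: the instance UUID (and "<uuid>-events") is used as the lock name so that terminate and event-clearing work is serialized per instance. A minimal sketch of that pattern, with a placeholder UUID and a do-nothing critical section standing in for the real Nova handlers, might look like:

from oslo_concurrency import lockutils

# Placeholder UUID; the log entries use the real instance UUID as the lock name.
INSTANCE_UUID = "00000000-0000-0000-0000-000000000000"


@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    """Stand-in critical section; the real handler tears the guest down here."""
    # Event bookkeeping takes its own lock, which is why the log shows a
    # second "<uuid>-events" acquire/release pair nested inside the first.
    with lockutils.lock(INSTANCE_UUID + "-events"):
        pass


do_terminate_instance()

The "waited N.NNNs" figure in these entries is how long the caller blocked before the lock was granted, and "held N.NNNs" is how long the decorated call or context manager kept the lock before releasing it.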
[ 796.275765] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.279225] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a090c8f-d492-4049-9f70-93f3c7a91fcf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.282020] env[62204]: DEBUG oslo_concurrency.lockutils [req-a60b3a1f-74d9-42a1-8ae4-2e9c0b89341c req-bf046d1e-fa74-4030-b8e2-f2b2f9fe5c3e service nova] Releasing lock "refresh_cache-dba1edda-edfd-4a97-ab95-48f3f5a933f8" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.289469] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4645cf1e-15cd-4ec1-bc73-e9f10bf33880 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.309876] env[62204]: DEBUG oslo_concurrency.lockutils [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "cce823b9-6a03-4902-9794-2b93f99eef94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.310137] env[62204]: DEBUG oslo_concurrency.lockutils [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "cce823b9-6a03-4902-9794-2b93f99eef94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.310339] env[62204]: DEBUG oslo_concurrency.lockutils [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "cce823b9-6a03-4902-9794-2b93f99eef94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.310565] env[62204]: DEBUG oslo_concurrency.lockutils [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "cce823b9-6a03-4902-9794-2b93f99eef94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.310746] env[62204]: DEBUG oslo_concurrency.lockutils [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "cce823b9-6a03-4902-9794-2b93f99eef94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.315143] 
env[62204]: INFO nova.compute.manager [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Terminating instance [ 796.317534] env[62204]: DEBUG nova.compute.manager [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 796.317751] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.318701] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dcfa0a-9455-4f50-b30f-26bdfce61230 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.326331] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.334089] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51c48976-6032-475d-85d9-3659e6d51748 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.336397] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 69604167-6a61-4723-bf7d-7ba168837839 could not be found. [ 796.336670] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 796.336963] env[62204]: INFO nova.compute.manager [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Took 0.11 seconds to destroy the instance on the hypervisor. [ 796.337238] env[62204]: DEBUG oslo.service.loopingcall [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 796.340405] env[62204]: DEBUG nova.compute.manager [-] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 796.340516] env[62204]: DEBUG nova.network.neutron [-] [instance: 69604167-6a61-4723-bf7d-7ba168837839] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 796.347723] env[62204]: DEBUG oslo_vmware.api [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 796.347723] env[62204]: value = "task-1199691" [ 796.347723] env[62204]: _type = "Task" [ 796.347723] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.365757] env[62204]: DEBUG oslo_vmware.api [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199691, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.515428] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d4aaf3-5a5b-4d6f-9b98-63de573ee98f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.527212] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e554deb3-ac1a-47d8-9336-12e3d5dc2a1a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.574992] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8626e0-824e-4b43-9b22-ae463d67eb1d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.586490] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499442} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.589212] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] dba1edda-edfd-4a97-ab95-48f3f5a933f8/dba1edda-edfd-4a97-ab95-48f3f5a933f8.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 796.589710] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 796.590117] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9346f4cb-ff3f-4b8d-82b3-82561e783a1b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.593410] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171b0c6c-a88a-4588-9e59-9249bd10621d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.615170] env[62204]: DEBUG nova.compute.provider_tree [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.621142] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 796.621142] env[62204]: value = "task-1199692" [ 796.621142] env[62204]: _type = "Task" [ 796.621142] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.631527] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199692, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.657754] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.658145] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1990934-2da4-4d78-a22e-eb75ae2a4ee0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.665723] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 796.665723] env[62204]: value = "task-1199693" [ 796.665723] env[62204]: _type = "Task" [ 796.665723] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.675265] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199693, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.725595] env[62204]: DEBUG nova.network.neutron [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 796.860670] env[62204]: DEBUG oslo_vmware.api [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199691, 'name': PowerOffVM_Task, 'duration_secs': 0.218838} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.864588] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.864588] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.864719] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afb30795-4e24-4030-baf7-aee30df2d89c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.937723] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 796.939050] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.941361] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.941361] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Deleting the datastore file [datastore1] cce823b9-6a03-4902-9794-2b93f99eef94 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.942328] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a5552f2-5efb-4352-bc6b-063b92111eb9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.951950] env[62204]: DEBUG oslo_vmware.api [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for the task: (returnval){ [ 796.951950] env[62204]: value = "task-1199695" [ 796.951950] env[62204]: _type = "Task" [ 796.951950] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.969073] env[62204]: DEBUG oslo_vmware.api [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199695, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.980601] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 796.981369] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 796.981661] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.981953] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 796.982304] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.982587] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 796.983056] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 796.983409] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 796.984966] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 796.984966] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 796.984966] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 796.984966] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9ff2b8-23d0-4691-bb0a-d47cf0178df1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.995786] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4200e5eb-b6a4-4b8a-8823-92bc14094e4a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.123977] env[62204]: DEBUG nova.scheduler.client.report [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 797.139630] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199692, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07421} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.139630] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.139630] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02aac335-d05f-4523-9f24-e7430561b2b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.165540] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] dba1edda-edfd-4a97-ab95-48f3f5a933f8/dba1edda-edfd-4a97-ab95-48f3f5a933f8.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.166477] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-503d0e46-aecb-4014-982d-5de7556f98b7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.194227] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199693, 'name': PowerOffVM_Task, 'duration_secs': 0.261735} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.196071] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 797.196338] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 797.196338] env[62204]: value = "task-1199696" [ 797.196338] env[62204]: _type = "Task" [ 797.196338] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.197202] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d1f4cc-7014-4f84-afa3-f2da5b32ac4e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.223179] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199696, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.224050] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd8a901-2d55-4d2e-9993-f2643964d560 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.338545] env[62204]: DEBUG nova.network.neutron [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Updating instance_info_cache with network_info: [{"id": "8a6c2acb-ed90-490d-b4c8-9ab8037a80d2", "address": "fa:16:3e:6f:e6:3c", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6c2acb-ed", "ovs_interfaceid": "8a6c2acb-ed90-490d-b4c8-9ab8037a80d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.467188] env[62204]: DEBUG oslo_vmware.api [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Task: {'id': task-1199695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.321312} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.468183] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.468459] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.468735] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.469026] env[62204]: INFO nova.compute.manager [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Took 1.15 seconds to destroy the instance on the hypervisor. [ 797.469342] env[62204]: DEBUG oslo.service.loopingcall [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.469564] env[62204]: DEBUG nova.compute.manager [-] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 797.469713] env[62204]: DEBUG nova.network.neutron [-] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 797.570878] env[62204]: DEBUG nova.network.neutron [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Successfully updated port: 1eef631c-fa58-4ad5-862e-c25b847eaa2c {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 797.629670] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.630096] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 797.634226] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.739s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.636112] env[62204]: INFO nova.compute.claims [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 797.712299] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199696, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.737022] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 797.737022] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-baccc0e3-8825-4472-8da7-1b31521c4e41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.748683] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 797.748683] env[62204]: value = "task-1199697" [ 797.748683] env[62204]: _type = "Task" [ 797.748683] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.761254] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199697, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.844754] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-2c393123-87de-460a-965d-43473478a79f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 797.844754] env[62204]: DEBUG nova.compute.manager [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Instance network_info: |[{"id": "8a6c2acb-ed90-490d-b4c8-9ab8037a80d2", "address": "fa:16:3e:6f:e6:3c", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6c2acb-ed", "ovs_interfaceid": "8a6c2acb-ed90-490d-b4c8-9ab8037a80d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 797.846927] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:e6:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a6c2acb-ed90-490d-b4c8-9ab8037a80d2', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 797.859290] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Creating folder: Project (be5f3f8b28ab4b63a2621b1fe1383af0). Parent ref: group-v259933. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.859290] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1511d9ca-faaa-43a8-8a3f-77c5670b6cd5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.875365] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Created folder: Project (be5f3f8b28ab4b63a2621b1fe1383af0) in parent group-v259933. [ 797.875365] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Creating folder: Instances. Parent ref: group-v260024. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 797.875365] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fd54da8-d056-4dec-bc9e-ced8ef78d9a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.887012] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Created folder: Instances in parent group-v260024. [ 797.887868] env[62204]: DEBUG oslo.service.loopingcall [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 797.888243] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c393123-87de-460a-965d-43473478a79f] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 797.891027] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33df296f-f4be-4431-a5f4-e0cdc395c28d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.917208] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 797.917208] env[62204]: value = "task-1199700" [ 797.917208] env[62204]: _type = "Task" [ 797.917208] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.929926] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199700, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.074921] env[62204]: DEBUG oslo_concurrency.lockutils [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.074921] env[62204]: DEBUG oslo_concurrency.lockutils [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.074921] env[62204]: DEBUG nova.network.neutron [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 798.137207] env[62204]: DEBUG nova.compute.utils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 798.139672] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 798.139875] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 798.215818] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199696, 'name': ReconfigVM_Task, 'duration_secs': 0.688896} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.216177] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Reconfigured VM instance instance-0000003d to attach disk [datastore2] dba1edda-edfd-4a97-ab95-48f3f5a933f8/dba1edda-edfd-4a97-ab95-48f3f5a933f8.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.216930] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c1f908b-2412-4a1f-8dbf-7d432187346a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.221620] env[62204]: DEBUG nova.policy [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6169c0354c748aaa350e0d310620eb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '286c8643ef8c4d5ca5482bba5952ed53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 798.229544] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 798.229544] env[62204]: value = "task-1199701" [ 798.229544] env[62204]: _type = "Task" [ 798.229544] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.240773] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199701, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.241526] env[62204]: DEBUG nova.network.neutron [-] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.267697] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199697, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.430345] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199700, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.491092] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Successfully updated port: 557947cd-ca92-41e1-a093-6b0f7718f5cd {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 798.553658] env[62204]: DEBUG nova.compute.manager [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Received event network-vif-plugged-8a6c2acb-ed90-490d-b4c8-9ab8037a80d2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 798.555367] env[62204]: DEBUG oslo_concurrency.lockutils [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] Acquiring lock "2c393123-87de-460a-965d-43473478a79f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.555367] env[62204]: DEBUG oslo_concurrency.lockutils [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] Lock "2c393123-87de-460a-965d-43473478a79f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 798.555367] env[62204]: DEBUG oslo_concurrency.lockutils [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] Lock "2c393123-87de-460a-965d-43473478a79f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.555367] env[62204]: DEBUG nova.compute.manager [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] No waiting events found dispatching network-vif-plugged-8a6c2acb-ed90-490d-b4c8-9ab8037a80d2 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 798.555367] env[62204]: WARNING nova.compute.manager [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Received unexpected event network-vif-plugged-8a6c2acb-ed90-490d-b4c8-9ab8037a80d2 for instance with vm_state building and task_state spawning. [ 798.555367] env[62204]: DEBUG nova.compute.manager [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Received event network-changed-8a6c2acb-ed90-490d-b4c8-9ab8037a80d2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 798.555367] env[62204]: DEBUG nova.compute.manager [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Refreshing instance network info cache due to event network-changed-8a6c2acb-ed90-490d-b4c8-9ab8037a80d2. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 798.555367] env[62204]: DEBUG oslo_concurrency.lockutils [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] Acquiring lock "refresh_cache-2c393123-87de-460a-965d-43473478a79f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 798.555367] env[62204]: DEBUG oslo_concurrency.lockutils [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] Acquired lock "refresh_cache-2c393123-87de-460a-965d-43473478a79f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.555752] env[62204]: DEBUG nova.network.neutron [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Refreshing network info cache for port 8a6c2acb-ed90-490d-b4c8-9ab8037a80d2 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 798.561893] env[62204]: DEBUG nova.network.neutron [-] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.645234] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 798.683091] env[62204]: WARNING nova.network.neutron [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] 3b20dcf5-a239-493a-bd84-9815cabea48a already exists in list: networks containing: ['3b20dcf5-a239-493a-bd84-9815cabea48a']. ignoring it [ 798.683321] env[62204]: WARNING nova.network.neutron [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] 3b20dcf5-a239-493a-bd84-9815cabea48a already exists in list: networks containing: ['3b20dcf5-a239-493a-bd84-9815cabea48a']. ignoring it [ 798.742954] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199701, 'name': Rename_Task, 'duration_secs': 0.178123} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.746976] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 798.747485] env[62204]: INFO nova.compute.manager [-] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Took 2.41 seconds to deallocate network for instance. 
[ 798.747999] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a508b98e-324b-4681-bf69-b8b4ae8e827d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.758738] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 798.758738] env[62204]: value = "task-1199702" [ 798.758738] env[62204]: _type = "Task" [ 798.758738] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.767468] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199697, 'name': CreateSnapshot_Task, 'duration_secs': 0.75643} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.771290] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 798.772515] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c04fe51-054b-4e91-99bd-ab276eb62735 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.779859] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199702, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.867876] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Successfully created port: c5dff251-4a4b-46c8-908a-22925c09890c {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 798.936889] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199700, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.004339] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "refresh_cache-67ee5c4d-3825-4580-a26e-74eb8da50883" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.004339] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired lock "refresh_cache-67ee5c4d-3825-4580-a26e-74eb8da50883" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.004339] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 799.070689] env[62204]: INFO nova.compute.manager [-] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Took 1.60 seconds to deallocate network for instance. [ 799.104224] env[62204]: DEBUG nova.compute.manager [req-ca9d5881-a80d-4f7d-87e9-c7c3b87f8a8a req-1ca5a789-bdd5-4c1b-8ccd-126dd4950da5 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-vif-plugged-1eef631c-fa58-4ad5-862e-c25b847eaa2c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 799.104224] env[62204]: DEBUG oslo_concurrency.lockutils [req-ca9d5881-a80d-4f7d-87e9-c7c3b87f8a8a req-1ca5a789-bdd5-4c1b-8ccd-126dd4950da5 service nova] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.104835] env[62204]: DEBUG oslo_concurrency.lockutils [req-ca9d5881-a80d-4f7d-87e9-c7c3b87f8a8a req-1ca5a789-bdd5-4c1b-8ccd-126dd4950da5 service nova] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.104835] env[62204]: DEBUG oslo_concurrency.lockutils [req-ca9d5881-a80d-4f7d-87e9-c7c3b87f8a8a req-1ca5a789-bdd5-4c1b-8ccd-126dd4950da5 service nova] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.105014] env[62204]: DEBUG nova.compute.manager [req-ca9d5881-a80d-4f7d-87e9-c7c3b87f8a8a req-1ca5a789-bdd5-4c1b-8ccd-126dd4950da5 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] No waiting events found dispatching network-vif-plugged-1eef631c-fa58-4ad5-862e-c25b847eaa2c {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 799.105408] env[62204]: WARNING nova.compute.manager [req-ca9d5881-a80d-4f7d-87e9-c7c3b87f8a8a req-1ca5a789-bdd5-4c1b-8ccd-126dd4950da5 service nova] [instance: 
a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received unexpected event network-vif-plugged-1eef631c-fa58-4ad5-862e-c25b847eaa2c for instance with vm_state active and task_state None. [ 799.189625] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8168752-c4d1-4cb9-bf0f-67341bc4a272 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.205020] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e948be39-0660-461b-8535-e0a063428370 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.252323] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41004ef1-69f2-4be6-967f-4731617e8e0e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.266335] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e71010e-538f-47af-a8fd-2eb554027e09 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.289767] env[62204]: DEBUG nova.compute.provider_tree [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.304420] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 799.304946] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199702, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.307094] env[62204]: DEBUG nova.scheduler.client.report [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 799.311351] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6422eb7d-7812-4aa3-b459-d47987cbf60f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.324282] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 799.324282] env[62204]: value = "task-1199703" [ 799.324282] env[62204]: _type = "Task" [ 799.324282] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.327982] env[62204]: INFO nova.compute.manager [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Took 0.58 seconds to detach 1 volumes for instance. [ 799.333332] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199703, 'name': CloneVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.334041] env[62204]: DEBUG nova.compute.manager [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Deleting volume: e1a1d343-79bf-455c-8446-09fa8e9f2035 {{(pid=62204) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 799.432377] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199700, 'name': CreateVM_Task, 'duration_secs': 1.446696} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.432377] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c393123-87de-460a-965d-43473478a79f] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 799.432377] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.432377] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.432377] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 799.432377] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91531c4e-b3ed-45b4-988d-128c175b4624 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.438625] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 799.438625] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52233e52-6175-07c6-4949-0907d74de7cc" [ 799.438625] env[62204]: _type = "Task" [ 799.438625] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.452494] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52233e52-6175-07c6-4949-0907d74de7cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.577526] env[62204]: DEBUG oslo_concurrency.lockutils [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.605323] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 799.616139] env[62204]: DEBUG nova.network.neutron [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Updated VIF entry in instance network info cache for port 8a6c2acb-ed90-490d-b4c8-9ab8037a80d2. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 799.617191] env[62204]: DEBUG nova.network.neutron [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Updating instance_info_cache with network_info: [{"id": "8a6c2acb-ed90-490d-b4c8-9ab8037a80d2", "address": "fa:16:3e:6f:e6:3c", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6c2acb-ed", "ovs_interfaceid": "8a6c2acb-ed90-490d-b4c8-9ab8037a80d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.659654] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 799.777521] env[62204]: DEBUG oslo_vmware.api [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199702, 'name': PowerOnVM_Task, 'duration_secs': 0.604072} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.777872] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 799.778096] env[62204]: INFO nova.compute.manager [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Took 8.03 seconds to spawn the instance on the hypervisor. 
[ 799.778957] env[62204]: DEBUG nova.compute.manager [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 799.779169] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c308fc-774f-43a7-a552-58ffd6fbd7f8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.784312] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 799.784547] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 799.784707] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 799.784890] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 799.785053] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 799.785209] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 799.785445] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 799.785604] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 799.785787] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 799.785956] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 799.786175] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 799.787454] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899da715-df9c-4c2b-b933-436b8120a5ca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.801092] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6722555-9f6b-4b74-afcb-aa288479fcb6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.824369] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.190s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.825247] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 799.830042] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.569s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.832565] env[62204]: INFO nova.compute.claims [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.848541] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199703, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.891698] env[62204]: DEBUG oslo_concurrency.lockutils [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 799.952552] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52233e52-6175-07c6-4949-0907d74de7cc, 'name': SearchDatastore_Task, 'duration_secs': 0.012897} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.952889] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.953248] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 799.953560] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 799.954256] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.954256] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.954363] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54f22a1d-86f1-440f-96de-203a3577a305 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.966313] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.966313] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 799.966313] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c054768-fc17-41fa-9a86-d65dcc68af85 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.972296] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 799.972296] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5246a980-df88-4658-26db-caffda6439de" [ 799.972296] env[62204]: _type = "Task" [ 799.972296] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.983939] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5246a980-df88-4658-26db-caffda6439de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.004058] env[62204]: DEBUG nova.network.neutron [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "address": "fa:16:3e:36:c2:b3", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33e67759-b1", "ovs_interfaceid": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1eef631c-fa58-4ad5-862e-c25b847eaa2c", "address": "fa:16:3e:e0:4e:df", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1eef631c-fa", "ovs_interfaceid": "1eef631c-fa58-4ad5-862e-c25b847eaa2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.104745] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Updating instance_info_cache with network_info: [{"id": "557947cd-ca92-41e1-a093-6b0f7718f5cd", "address": "fa:16:3e:ef:5c:d9", "network": {"id": "1092e3d4-936b-4061-b23c-b2fe2ca949bd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-721474329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286c8643ef8c4d5ca5482bba5952ed53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7adad264-9276-43ef-9b03-07dc27d3f81e", "external-id": "nsx-vlan-transportzone-329", "segmentation_id": 329, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap557947cd-ca", "ovs_interfaceid": "557947cd-ca92-41e1-a093-6b0f7718f5cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.121754] env[62204]: DEBUG oslo_concurrency.lockutils [req-642d5227-72a1-42a4-bed4-a5c6eeb5f5e4 req-4ac7a9f2-24bc-4cdb-9509-8e967de223a4 service 
nova] Releasing lock "refresh_cache-2c393123-87de-460a-965d-43473478a79f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.319400] env[62204]: INFO nova.compute.manager [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Took 47.87 seconds to build instance. [ 800.337563] env[62204]: DEBUG nova.compute.utils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 800.342813] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 800.343020] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 800.358687] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199703, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.405363] env[62204]: DEBUG nova.policy [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6169c0354c748aaa350e0d310620eb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '286c8643ef8c4d5ca5482bba5952ed53', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 800.486827] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5246a980-df88-4658-26db-caffda6439de, 'name': SearchDatastore_Task, 'duration_secs': 0.012833} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.488363] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebc9b274-7163-4720-a942-3419d3a8f3c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.496092] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 800.496092] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5231a7d3-2460-6974-a23a-7951474f5695" [ 800.496092] env[62204]: _type = "Task" [ 800.496092] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.505721] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5231a7d3-2460-6974-a23a-7951474f5695, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.510021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.510021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 800.510021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.510021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f23909-edcb-4799-a11e-ca2961bd0eab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.529032] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 800.529032] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 800.529032] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.529032] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 800.529032] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.529681] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 800.529681] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 800.529681] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 800.529985] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 800.529985] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 800.530127] env[62204]: DEBUG nova.virt.hardware [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 800.537046] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None 
req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfiguring VM to attach interface {{(pid=62204) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 800.537439] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ac0fcdb-d2a8-4844-8734-edc2969374ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.556132] env[62204]: DEBUG oslo_vmware.api [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 800.556132] env[62204]: value = "task-1199705" [ 800.556132] env[62204]: _type = "Task" [ 800.556132] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.566885] env[62204]: DEBUG oslo_vmware.api [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199705, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.607748] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Releasing lock "refresh_cache-67ee5c4d-3825-4580-a26e-74eb8da50883" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 800.608076] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Instance network_info: |[{"id": "557947cd-ca92-41e1-a093-6b0f7718f5cd", "address": "fa:16:3e:ef:5c:d9", "network": {"id": "1092e3d4-936b-4061-b23c-b2fe2ca949bd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-721474329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286c8643ef8c4d5ca5482bba5952ed53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7adad264-9276-43ef-9b03-07dc27d3f81e", "external-id": "nsx-vlan-transportzone-329", "segmentation_id": 329, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap557947cd-ca", "ovs_interfaceid": "557947cd-ca92-41e1-a093-6b0f7718f5cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 800.608544] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 
tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:5c:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7adad264-9276-43ef-9b03-07dc27d3f81e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '557947cd-ca92-41e1-a093-6b0f7718f5cd', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.618347] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Creating folder: Project (286c8643ef8c4d5ca5482bba5952ed53). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 800.618719] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e102372f-94c6-47de-978d-92217b707376 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.630640] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Created folder: Project (286c8643ef8c4d5ca5482bba5952ed53) in parent group-v259933. [ 800.631385] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Creating folder: Instances. Parent ref: group-v260029. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 800.631720] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f38b947-f7b8-4ded-8001-99394fd7595f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.649264] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Created folder: Instances in parent group-v260029. [ 800.649264] env[62204]: DEBUG oslo.service.loopingcall [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 800.649264] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 800.649264] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d65d8dd-1424-49a5-8f3d-b0e3e562001c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.671015] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 800.671015] env[62204]: value = "task-1199708" [ 800.671015] env[62204]: _type = "Task" [ 800.671015] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.683457] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199708, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.822988] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8731511e-1839-46d5-a0ea-bbd41fb5b1cb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.432s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.829218] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Successfully created port: a4e77bf2-081b-4fb9-9878-8c990c48fe46 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.845018] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199703, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.851327] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 800.934851] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Successfully updated port: c5dff251-4a4b-46c8-908a-22925c09890c {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.014711] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5231a7d3-2460-6974-a23a-7951474f5695, 'name': SearchDatastore_Task, 'duration_secs': 0.014177} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.015315] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.015448] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2c393123-87de-460a-965d-43473478a79f/2c393123-87de-460a-965d-43473478a79f.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 801.015803] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d021622e-effa-4d10-a34f-b326416ed95e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.024224] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 801.024224] env[62204]: value = "task-1199709" [ 801.024224] env[62204]: _type = "Task" [ 801.024224] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.033211] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199709, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.070044] env[62204]: DEBUG oslo_vmware.api [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199705, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.184556] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199708, 'name': CreateVM_Task, 'duration_secs': 0.441247} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.188353] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 801.190468] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.190753] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.191108] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 801.191494] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a5ce706-13d2-4cd7-8dd5-6add2484f611 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.200565] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 801.200565] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52673477-5e5c-fee4-20c6-113908a22e1a" [ 801.200565] env[62204]: _type = "Task" [ 801.200565] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.210206] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52673477-5e5c-fee4-20c6-113908a22e1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.320879] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8051d5-441f-4a83-bb6c-c9409eae6c2d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.328926] env[62204]: DEBUG nova.compute.manager [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 801.333601] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe4eaa9-73e2-460c-a763-103d0d54711f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.347232] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199703, 'name': CloneVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.386637] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a92c90f-bb63-4041-ac06-72a5d6f989ab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.399701] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7e3aac-05e1-4081-b160-fe266c0a0a14 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.417568] env[62204]: DEBUG nova.compute.provider_tree [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.437381] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "refresh_cache-1121b1b8-127e-475f-8dfc-de43911de39a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.437637] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired lock "refresh_cache-1121b1b8-127e-475f-8dfc-de43911de39a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.437883] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 801.536325] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199709, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.570765] env[62204]: DEBUG oslo_vmware.api [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199705, 'name': ReconfigVM_Task, 'duration_secs': 0.839164} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.571352] env[62204]: DEBUG oslo_concurrency.lockutils [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.571632] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfigured VM to attach interface {{(pid=62204) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 801.663537] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "98805916-8501-4afb-9e1c-a5393f6e5557" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.663872] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "98805916-8501-4afb-9e1c-a5393f6e5557" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.708743] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52673477-5e5c-fee4-20c6-113908a22e1a, 'name': SearchDatastore_Task, 'duration_secs': 0.025496} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.709762] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.709762] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 801.709934] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.710107] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.711176] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.711176] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2f21652-7883-4b7b-bbd0-c2c457b19908 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.720982] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.721143] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 801.721881] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4a1a2da-433a-4fee-899f-b336c3b43ad4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.727773] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 801.727773] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522d9b15-1da6-4b9a-6b7e-bb1f6c0ac26e" [ 801.727773] env[62204]: _type = "Task" [ 801.727773] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.737682] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522d9b15-1da6-4b9a-6b7e-bb1f6c0ac26e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.844417] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199703, 'name': CloneVM_Task, 'duration_secs': 2.029562} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.846732] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Created linked-clone VM from snapshot [ 801.847695] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299e4f26-4ba6-470e-8c3f-bf580279e7ab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.855268] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Uploading image c2930f97-8cc3-4e0b-b082-ac3975f12ee6 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 801.860399] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.884023] env[62204]: DEBUG oslo_vmware.rw_handles [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 801.884023] env[62204]: value = "vm-260028" [ 801.884023] env[62204]: _type = "VirtualMachine" [ 801.884023] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 801.884350] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5d0c2476-00a5-41f1-8926-4a19fc223661 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.886841] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 801.894560] env[62204]: DEBUG oslo_vmware.rw_handles [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lease: (returnval){ [ 801.894560] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5261a527-4afd-5836-c4d8-4652cce47113" [ 801.894560] env[62204]: _type = "HttpNfcLease" [ 801.894560] env[62204]: } obtained for exporting VM: (result){ [ 801.894560] env[62204]: value = "vm-260028" [ 801.894560] env[62204]: _type = "VirtualMachine" [ 801.894560] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 801.894815] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the lease: (returnval){ [ 801.894815] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5261a527-4afd-5836-c4d8-4652cce47113" [ 801.894815] env[62204]: _type = "HttpNfcLease" [ 801.894815] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 801.902132] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 801.902132] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5261a527-4afd-5836-c4d8-4652cce47113" [ 801.902132] env[62204]: _type = "HttpNfcLease" [ 801.902132] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 801.907278] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 801.907599] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 801.907805] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 801.908096] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 801.908284] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 801.908567] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 801.908724] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 801.908928] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 801.909129] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 801.909349] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 801.909565] env[62204]: DEBUG nova.virt.hardware [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 801.910419] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3ed4d9-96b8-43f3-b55c-93cf52d5ffb5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.918261] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2742b5d7-e06e-4acd-a78d-81e9d5da4be7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.922678] env[62204]: DEBUG nova.scheduler.client.report [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 802.036673] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660134} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.037461] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2c393123-87de-460a-965d-43473478a79f/2c393123-87de-460a-965d-43473478a79f.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 802.037461] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 802.037714] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f19f9afd-06b4-4ff3-a283-d2000f85c9e3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.044121] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 802.044121] env[62204]: value = "task-1199711" [ 802.044121] env[62204]: _type = "Task" [ 802.044121] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.048201] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.055183] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199711, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.077324] env[62204]: DEBUG oslo_concurrency.lockutils [None req-61b37359-1ecb-42d9-8df2-f344945481ea tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-1eef631c-fa58-4ad5-862e-c25b847eaa2c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.417s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.150991] env[62204]: DEBUG nova.compute.manager [req-50f9f168-5f21-4d8a-ae2f-28f809046703 req-e7559a3e-1c5b-4279-af19-007169842290 service nova] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Received event network-vif-deleted-70a9a29f-b59c-48d3-8c34-c3bbff8169e0 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 802.151281] env[62204]: DEBUG nova.compute.manager [req-50f9f168-5f21-4d8a-ae2f-28f809046703 req-e7559a3e-1c5b-4279-af19-007169842290 service nova] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Received event network-vif-deleted-3c41dfa3-6f25-4539-9cbc-c84270886db1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 802.210194] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Updating instance_info_cache with network_info: [{"id": "c5dff251-4a4b-46c8-908a-22925c09890c", "address": "fa:16:3e:5a:27:c0", "network": {"id": "1092e3d4-936b-4061-b23c-b2fe2ca949bd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-721474329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286c8643ef8c4d5ca5482bba5952ed53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7adad264-9276-43ef-9b03-07dc27d3f81e", "external-id": "nsx-vlan-transportzone-329", "segmentation_id": 329, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dff251-4a", "ovs_interfaceid": "c5dff251-4a4b-46c8-908a-22925c09890c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.241273] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522d9b15-1da6-4b9a-6b7e-bb1f6c0ac26e, 'name': SearchDatastore_Task, 'duration_secs': 0.025867} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.242104] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e8d0278-9426-4cd8-92d4-fad4a1aa7ab6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.248423] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 802.248423] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c5bf5-d9a7-3212-7273-509a3382dbb1" [ 802.248423] env[62204]: _type = "Task" [ 802.248423] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.256039] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c5bf5-d9a7-3212-7273-509a3382dbb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.402986] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 802.402986] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5261a527-4afd-5836-c4d8-4652cce47113" [ 802.402986] env[62204]: _type = "HttpNfcLease" [ 802.402986] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 802.403380] env[62204]: DEBUG oslo_vmware.rw_handles [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 802.403380] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5261a527-4afd-5836-c4d8-4652cce47113" [ 802.403380] env[62204]: _type = "HttpNfcLease" [ 802.403380] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 802.404083] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975cb450-2c10-4641-8574-b8d63bad8ea5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.413686] env[62204]: DEBUG oslo_vmware.rw_handles [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c267-baf1-7f9c-e9fe-f91438c12cf5/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 802.413902] env[62204]: DEBUG oslo_vmware.rw_handles [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c267-baf1-7f9c-e9fe-f91438c12cf5/disk-0.vmdk for reading. 
{{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 802.478113] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.478113] env[62204]: DEBUG nova.compute.manager [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 802.479381] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.701s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.479623] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.481708] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.531s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.481913] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.483702] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.238s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.485208] env[62204]: INFO nova.compute.claims [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 802.517753] env[62204]: INFO nova.scheduler.client.report [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 
tempest-MultipleCreateTestJSON-773792924-project-member] Deleted allocations for instance ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702 [ 802.520741] env[62204]: INFO nova.scheduler.client.report [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Deleted allocations for instance 703bf0c4-9bff-4967-8e84-09969b32b5a1 [ 802.556379] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199711, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066876} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.556643] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 802.557281] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2805cf5b-1690-47be-8aba-cbe80b7ddb6f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.581076] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 2c393123-87de-460a-965d-43473478a79f/2c393123-87de-460a-965d-43473478a79f.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.585552] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-970ded34-3c4e-4856-b331-c5a0f054cc20 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.602834] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ad19184b-769c-45c1-9e49-5a8d41ee5437 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.611756] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 802.611756] env[62204]: value = "task-1199712" [ 802.611756] env[62204]: _type = "Task" [ 802.611756] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.621150] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199712, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.632594] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "4dc4546f-85e6-4259-9ccd-a7396669eace" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.633731] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.712759] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Releasing lock "refresh_cache-1121b1b8-127e-475f-8dfc-de43911de39a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.713134] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Instance network_info: |[{"id": "c5dff251-4a4b-46c8-908a-22925c09890c", "address": "fa:16:3e:5a:27:c0", "network": {"id": "1092e3d4-936b-4061-b23c-b2fe2ca949bd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-721474329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286c8643ef8c4d5ca5482bba5952ed53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7adad264-9276-43ef-9b03-07dc27d3f81e", "external-id": "nsx-vlan-transportzone-329", "segmentation_id": 329, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dff251-4a", "ovs_interfaceid": "c5dff251-4a4b-46c8-908a-22925c09890c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 802.713649] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:27:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7adad264-9276-43ef-9b03-07dc27d3f81e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5dff251-4a4b-46c8-908a-22925c09890c', 
'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 802.721475] env[62204]: DEBUG oslo.service.loopingcall [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.721780] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 802.722039] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-281bc41c-0267-49e2-a7f6-5d6f44825f9f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.743377] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 802.743377] env[62204]: value = "task-1199713" [ 802.743377] env[62204]: _type = "Task" [ 802.743377] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.751337] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199713, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.759897] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c5bf5-d9a7-3212-7273-509a3382dbb1, 'name': SearchDatastore_Task, 'duration_secs': 0.026937} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.760191] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 802.760454] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 67ee5c4d-3825-4580-a26e-74eb8da50883/67ee5c4d-3825-4580-a26e-74eb8da50883.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 802.760720] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4bc23309-73ab-419f-850f-447bc8432bf9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.767049] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 802.767049] env[62204]: value = "task-1199714" [ 802.767049] env[62204]: _type = "Task" [ 802.767049] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.776526] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199714, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.882897] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "0a720922-60ea-4b31-ba56-cdcbba1ab629" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.883372] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "0a720922-60ea-4b31-ba56-cdcbba1ab629" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.902239] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Successfully updated port: a4e77bf2-081b-4fb9-9878-8c990c48fe46 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 802.992584] env[62204]: DEBUG nova.compute.utils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 802.995704] env[62204]: DEBUG nova.compute.manager [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 802.995704] env[62204]: DEBUG nova.network.neutron [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 803.015879] env[62204]: DEBUG nova.compute.manager [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-changed-1eef631c-fa58-4ad5-862e-c25b847eaa2c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 803.016290] env[62204]: DEBUG nova.compute.manager [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Refreshing instance network info cache due to event network-changed-1eef631c-fa58-4ad5-862e-c25b847eaa2c. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 803.016741] env[62204]: DEBUG oslo_concurrency.lockutils [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] Acquiring lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.016819] env[62204]: DEBUG oslo_concurrency.lockutils [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] Acquired lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.017962] env[62204]: DEBUG nova.network.neutron [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Refreshing network info cache for port 1eef631c-fa58-4ad5-862e-c25b847eaa2c {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 803.030933] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d3d76fb-109b-4e3c-b2a2-ba9dd04f3109 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.232s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.036321] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b9b1d9a5-ec2a-4fea-983c-11201cb73b50 tempest-MultipleCreateTestJSON-773792924 tempest-MultipleCreateTestJSON-773792924-project-member] Lock "703bf0c4-9bff-4967-8e84-09969b32b5a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.305s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.078806] env[62204]: DEBUG nova.policy [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6db3ba1bb9b9464d870969f1f7d95a9d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ef8dc436e4b45d0a8d50468666358e3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 803.124095] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199712, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.260109] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199713, 'name': CreateVM_Task, 'duration_secs': 0.379042} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.260109] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 803.262022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.262022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.262022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 803.263353] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f01fc0b7-0229-44cf-af44-3d5e8aa1b3e7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.270821] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 803.270821] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524421cb-a1fb-dc03-9891-c2c8e4912ddb" [ 803.270821] env[62204]: _type = "Task" [ 803.270821] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.287283] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199714, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.292553] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524421cb-a1fb-dc03-9891-c2c8e4912ddb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.406215] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "refresh_cache-1a1cb81f-383e-48de-8c11-3d5e2c801f40" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.406215] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired lock "refresh_cache-1a1cb81f-383e-48de-8c11-3d5e2c801f40" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.406319] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.500756] env[62204]: DEBUG nova.compute.manager [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 803.632411] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199712, 'name': ReconfigVM_Task, 'duration_secs': 0.521199} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.634047] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 2c393123-87de-460a-965d-43473478a79f/2c393123-87de-460a-965d-43473478a79f.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.634047] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-500f4c8e-a745-4531-8dee-efbd74653c63 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.645454] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 803.645454] env[62204]: value = "task-1199715" [ 803.645454] env[62204]: _type = "Task" [ 803.645454] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.660234] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199715, 'name': Rename_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.782143] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680237} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.786498] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 67ee5c4d-3825-4580-a26e-74eb8da50883/67ee5c4d-3825-4580-a26e-74eb8da50883.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 803.786751] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 803.788489] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27590dc0-41ad-47de-996a-dab3364e1f6b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.798482] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524421cb-a1fb-dc03-9891-c2c8e4912ddb, 'name': SearchDatastore_Task, 'duration_secs': 0.062286} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.799519] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.799873] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 803.800055] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.800207] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.800397] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 803.803600] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6220cecd-3e0b-45f2-bcc8-6a6299683598 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.806193] env[62204]: DEBUG nova.network.neutron [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Successfully created port: 4e81e820-357d-4b7e-900f-aaac4c7c2798 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.812497] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 803.812497] env[62204]: value = "task-1199716" [ 803.812497] env[62204]: _type = "Task" [ 803.812497] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.819137] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 803.819960] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 803.825430] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e00335a-8304-423b-997f-70ec43777e05 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.827176] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199716, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.834345] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 803.834345] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5264300a-4c64-740b-1e23-dcd6514a2f9f" [ 803.834345] env[62204]: _type = "Task" [ 803.834345] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.844132] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5264300a-4c64-740b-1e23-dcd6514a2f9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.859081] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0380b7c5-c246-4860-b196-dfb689232467 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.866955] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-62d86921-721f-4d1b-8795-8d72590a21fb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Suspending the VM {{(pid=62204) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 803.867653] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e972575e-8785-4ce1-a7f0-9de8d424d7d2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.876304] env[62204]: DEBUG oslo_vmware.api [None req-62d86921-721f-4d1b-8795-8d72590a21fb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 803.876304] env[62204]: value = "task-1199717" [ 803.876304] env[62204]: _type = "Task" [ 803.876304] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.894179] env[62204]: DEBUG oslo_vmware.api [None req-62d86921-721f-4d1b-8795-8d72590a21fb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199717, 'name': SuspendVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.901152] env[62204]: DEBUG nova.network.neutron [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updated VIF entry in instance network info cache for port 1eef631c-fa58-4ad5-862e-c25b847eaa2c. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 803.901755] env[62204]: DEBUG nova.network.neutron [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "address": "fa:16:3e:36:c2:b3", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33e67759-b1", "ovs_interfaceid": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1eef631c-fa58-4ad5-862e-c25b847eaa2c", "address": "fa:16:3e:e0:4e:df", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1eef631c-fa", "ovs_interfaceid": "1eef631c-fa58-4ad5-862e-c25b847eaa2c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.964132] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.037077] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6d614f-1d78-464e-93db-fc3864470ad9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.046449] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb7dddc0-07a5-4582-91e0-8065de67e391 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.088392] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d69c16-a7f7-4e3b-9835-8d3661e5f32e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.098488] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9ffd81-1b88-408e-a785-705b14abbf3b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.118569] env[62204]: DEBUG nova.compute.provider_tree [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.159562] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199715, 'name': Rename_Task, 'duration_secs': 0.154712} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.160103] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 804.160510] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e00a5f31-a279-49c5-a09d-93b882caebf4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.171923] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 804.171923] env[62204]: value = "task-1199718" [ 804.171923] env[62204]: _type = "Task" [ 804.171923] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.183300] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199718, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.246341] env[62204]: DEBUG nova.network.neutron [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Updating instance_info_cache with network_info: [{"id": "a4e77bf2-081b-4fb9-9878-8c990c48fe46", "address": "fa:16:3e:bf:41:60", "network": {"id": "1092e3d4-936b-4061-b23c-b2fe2ca949bd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-721474329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286c8643ef8c4d5ca5482bba5952ed53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7adad264-9276-43ef-9b03-07dc27d3f81e", "external-id": "nsx-vlan-transportzone-329", "segmentation_id": 329, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4e77bf2-08", "ovs_interfaceid": "a4e77bf2-081b-4fb9-9878-8c990c48fe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.327020] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199716, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070371} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.327020] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 804.327020] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6fab15-b80f-4bd8-8a94-3ca210dd6e6a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.350574] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 67ee5c4d-3825-4580-a26e-74eb8da50883/67ee5c4d-3825-4580-a26e-74eb8da50883.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 804.356776] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37297b9d-85b8-473b-ba1b-cc4e1c7b161a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.374331] env[62204]: DEBUG nova.compute.manager [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Received event network-vif-plugged-c5dff251-4a4b-46c8-908a-22925c09890c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 804.374571] env[62204]: DEBUG oslo_concurrency.lockutils [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] Acquiring lock "1121b1b8-127e-475f-8dfc-de43911de39a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.374986] env[62204]: DEBUG oslo_concurrency.lockutils [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] Lock "1121b1b8-127e-475f-8dfc-de43911de39a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.375343] env[62204]: DEBUG oslo_concurrency.lockutils [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] Lock "1121b1b8-127e-475f-8dfc-de43911de39a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.375499] env[62204]: DEBUG nova.compute.manager [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] No waiting events found dispatching network-vif-plugged-c5dff251-4a4b-46c8-908a-22925c09890c {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 804.375727] env[62204]: WARNING nova.compute.manager 
[req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Received unexpected event network-vif-plugged-c5dff251-4a4b-46c8-908a-22925c09890c for instance with vm_state building and task_state spawning. [ 804.376024] env[62204]: DEBUG nova.compute.manager [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Received event network-changed-c5dff251-4a4b-46c8-908a-22925c09890c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 804.376244] env[62204]: DEBUG nova.compute.manager [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Refreshing instance network info cache due to event network-changed-c5dff251-4a4b-46c8-908a-22925c09890c. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 804.376591] env[62204]: DEBUG oslo_concurrency.lockutils [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] Acquiring lock "refresh_cache-1121b1b8-127e-475f-8dfc-de43911de39a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.376772] env[62204]: DEBUG oslo_concurrency.lockutils [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] Acquired lock "refresh_cache-1121b1b8-127e-475f-8dfc-de43911de39a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.377015] env[62204]: DEBUG nova.network.neutron [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Refreshing network info cache for port c5dff251-4a4b-46c8-908a-22925c09890c {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 804.389369] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 804.389369] env[62204]: value = "task-1199719" [ 804.389369] env[62204]: _type = "Task" [ 804.389369] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.389668] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5264300a-4c64-740b-1e23-dcd6514a2f9f, 'name': SearchDatastore_Task, 'duration_secs': 0.014759} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.395412] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00b58b07-e07e-4064-835c-d27d1a098f3c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.402264] env[62204]: DEBUG oslo_vmware.api [None req-62d86921-721f-4d1b-8795-8d72590a21fb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199717, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.405306] env[62204]: DEBUG oslo_concurrency.lockutils [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] Releasing lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.405306] env[62204]: DEBUG nova.compute.manager [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Received event network-vif-plugged-557947cd-ca92-41e1-a093-6b0f7718f5cd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 804.405793] env[62204]: DEBUG oslo_concurrency.lockutils [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] Acquiring lock "67ee5c4d-3825-4580-a26e-74eb8da50883-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.405793] env[62204]: DEBUG oslo_concurrency.lockutils [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] Lock "67ee5c4d-3825-4580-a26e-74eb8da50883-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.406522] env[62204]: DEBUG oslo_concurrency.lockutils [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] Lock "67ee5c4d-3825-4580-a26e-74eb8da50883-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.406522] env[62204]: DEBUG nova.compute.manager [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] No waiting events found dispatching network-vif-plugged-557947cd-ca92-41e1-a093-6b0f7718f5cd {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 804.406522] env[62204]: WARNING nova.compute.manager [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Received unexpected event network-vif-plugged-557947cd-ca92-41e1-a093-6b0f7718f5cd for instance with vm_state building and task_state spawning. [ 804.406522] env[62204]: DEBUG nova.compute.manager [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Received event network-changed-557947cd-ca92-41e1-a093-6b0f7718f5cd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 804.406748] env[62204]: DEBUG nova.compute.manager [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Refreshing instance network info cache due to event network-changed-557947cd-ca92-41e1-a093-6b0f7718f5cd. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 804.407147] env[62204]: DEBUG oslo_concurrency.lockutils [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] Acquiring lock "refresh_cache-67ee5c4d-3825-4580-a26e-74eb8da50883" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 804.407347] env[62204]: DEBUG oslo_concurrency.lockutils [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] Acquired lock "refresh_cache-67ee5c4d-3825-4580-a26e-74eb8da50883" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.407528] env[62204]: DEBUG nova.network.neutron [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Refreshing network info cache for port 557947cd-ca92-41e1-a093-6b0f7718f5cd {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 804.414548] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 804.414548] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5210eb05-fca6-54d7-1c11-225188b19e0f" [ 804.414548] env[62204]: _type = "Task" [ 804.414548] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.415147] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199719, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.426010] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5210eb05-fca6-54d7-1c11-225188b19e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.015405} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.426427] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.426732] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 1121b1b8-127e-475f-8dfc-de43911de39a/1121b1b8-127e-475f-8dfc-de43911de39a.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 804.427045] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff086eaa-ba8b-419d-a51a-88658c01fc58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.435159] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 804.435159] env[62204]: value = "task-1199720" [ 804.435159] env[62204]: _type = "Task" [ 804.435159] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.444971] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199720, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.515255] env[62204]: DEBUG nova.compute.manager [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 804.621846] env[62204]: DEBUG nova.scheduler.client.report [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 804.683854] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199718, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.749016] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Releasing lock "refresh_cache-1a1cb81f-383e-48de-8c11-3d5e2c801f40" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.749409] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Instance network_info: |[{"id": "a4e77bf2-081b-4fb9-9878-8c990c48fe46", "address": "fa:16:3e:bf:41:60", "network": {"id": "1092e3d4-936b-4061-b23c-b2fe2ca949bd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-721474329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286c8643ef8c4d5ca5482bba5952ed53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7adad264-9276-43ef-9b03-07dc27d3f81e", "external-id": "nsx-vlan-transportzone-329", "segmentation_id": 329, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4e77bf2-08", "ovs_interfaceid": "a4e77bf2-081b-4fb9-9878-8c990c48fe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 804.749884] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:41:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'7adad264-9276-43ef-9b03-07dc27d3f81e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4e77bf2-081b-4fb9-9878-8c990c48fe46', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 804.764145] env[62204]: DEBUG oslo.service.loopingcall [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.764145] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 804.764145] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e2cd537-46a6-4c67-ab74-e83ca6a26d8f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.792540] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 804.792540] env[62204]: value = "task-1199721" [ 804.792540] env[62204]: _type = "Task" [ 804.792540] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.807913] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199721, 'name': CreateVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.902511] env[62204]: DEBUG oslo_vmware.api [None req-62d86921-721f-4d1b-8795-8d72590a21fb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199717, 'name': SuspendVM_Task, 'duration_secs': 0.8451} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.903577] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-62d86921-721f-4d1b-8795-8d72590a21fb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Suspended the VM {{(pid=62204) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 804.903963] env[62204]: DEBUG nova.compute.manager [None req-62d86921-721f-4d1b-8795-8d72590a21fb tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 804.905042] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c098f5b9-3f7c-4f4a-be98-4f021f477265 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.913046] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199719, 'name': ReconfigVM_Task, 'duration_secs': 0.467191} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.914139] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 67ee5c4d-3825-4580-a26e-74eb8da50883/67ee5c4d-3825-4580-a26e-74eb8da50883.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 804.915147] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38be3968-2893-4d20-9271-d2a47ca7e602 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.930896] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 804.930896] env[62204]: value = "task-1199722" [ 804.930896] env[62204]: _type = "Task" [ 804.930896] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.954060] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199722, 'name': Rename_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.954467] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199720, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.047500] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-33e67759-b1fb-4395-9ed1-bf2102c8d3ee" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.047947] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-33e67759-b1fb-4395-9ed1-bf2102c8d3ee" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.128600] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.129445] env[62204]: DEBUG nova.compute.manager [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 805.134525] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.990s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.134640] env[62204]: DEBUG nova.objects.instance [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lazy-loading 'resources' on Instance uuid 137ce499-6602-46b5-b1eb-b03282c2bab3 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.188805] env[62204]: DEBUG oslo_vmware.api [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199718, 'name': PowerOnVM_Task, 'duration_secs': 0.693041} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.189444] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 805.189829] env[62204]: INFO nova.compute.manager [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Took 10.92 seconds to spawn the instance on the hypervisor. [ 805.190140] env[62204]: DEBUG nova.compute.manager [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 805.191344] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14aee7d-bb5e-4ce7-8e2d-0f37df232f38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.200343] env[62204]: DEBUG nova.compute.manager [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Received event network-vif-plugged-a4e77bf2-081b-4fb9-9878-8c990c48fe46 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 805.200614] env[62204]: DEBUG oslo_concurrency.lockutils [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] Acquiring lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.200894] env[62204]: DEBUG oslo_concurrency.lockutils [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] Lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.201103] env[62204]: DEBUG oslo_concurrency.lockutils [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] Lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.201291] env[62204]: DEBUG nova.compute.manager [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] No waiting events found dispatching network-vif-plugged-a4e77bf2-081b-4fb9-9878-8c990c48fe46 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 805.201666] env[62204]: WARNING nova.compute.manager [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Received unexpected 
event network-vif-plugged-a4e77bf2-081b-4fb9-9878-8c990c48fe46 for instance with vm_state building and task_state spawning. [ 805.201666] env[62204]: DEBUG nova.compute.manager [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Received event network-changed-a4e77bf2-081b-4fb9-9878-8c990c48fe46 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 805.201820] env[62204]: DEBUG nova.compute.manager [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Refreshing instance network info cache due to event network-changed-a4e77bf2-081b-4fb9-9878-8c990c48fe46. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 805.201980] env[62204]: DEBUG oslo_concurrency.lockutils [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] Acquiring lock "refresh_cache-1a1cb81f-383e-48de-8c11-3d5e2c801f40" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.202163] env[62204]: DEBUG oslo_concurrency.lockutils [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] Acquired lock "refresh_cache-1a1cb81f-383e-48de-8c11-3d5e2c801f40" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.202355] env[62204]: DEBUG nova.network.neutron [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Refreshing network info cache for port a4e77bf2-081b-4fb9-9878-8c990c48fe46 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 805.309356] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199721, 'name': CreateVM_Task, 'duration_secs': 0.41957} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.309644] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 805.311437] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.311437] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.312849] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 805.314901] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30ab02b9-eae7-4037-8eaf-e3b1cd4b9f28 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.322457] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 805.322457] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52144d7e-fa36-a7d0-fbc2-3615558f6f78" [ 805.322457] env[62204]: _type = "Task" [ 805.322457] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.328880] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52144d7e-fa36-a7d0-fbc2-3615558f6f78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.376702] env[62204]: DEBUG nova.network.neutron [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Updated VIF entry in instance network info cache for port c5dff251-4a4b-46c8-908a-22925c09890c. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 805.376702] env[62204]: DEBUG nova.network.neutron [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Updating instance_info_cache with network_info: [{"id": "c5dff251-4a4b-46c8-908a-22925c09890c", "address": "fa:16:3e:5a:27:c0", "network": {"id": "1092e3d4-936b-4061-b23c-b2fe2ca949bd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-721474329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286c8643ef8c4d5ca5482bba5952ed53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7adad264-9276-43ef-9b03-07dc27d3f81e", "external-id": "nsx-vlan-transportzone-329", "segmentation_id": 329, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5dff251-4a", "ovs_interfaceid": "c5dff251-4a4b-46c8-908a-22925c09890c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.448396] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199720, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.625449} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.452016] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 1121b1b8-127e-475f-8dfc-de43911de39a/1121b1b8-127e-475f-8dfc-de43911de39a.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 805.452353] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 805.452697] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199722, 'name': Rename_Task, 'duration_secs': 0.259345} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.453642] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c562128e-385f-4de8-84e4-f99da44dbbbc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.457236] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 805.457236] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca8882fb-c899-4519-bfd9-eaa07ab0a6cf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.464227] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 805.464227] env[62204]: value = "task-1199723" [ 805.464227] env[62204]: _type = "Task" [ 805.464227] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.465325] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 805.465325] env[62204]: value = "task-1199724" [ 805.465325] env[62204]: _type = "Task" [ 805.465325] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.482026] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199723, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.483512] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199724, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.551529] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.551778] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.552841] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453fb813-4642-4f5d-b3c4-2eb3f46d51b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.578691] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f881039-800f-4243-a863-70118eb5fa91 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.609765] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfiguring VM to detach interface {{(pid=62204) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 805.610125] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-991133f2-2e8b-4e6b-ad6d-ebd4f1a2db29 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.625348] env[62204]: DEBUG nova.network.neutron [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Updated VIF entry in instance network info cache for port 557947cd-ca92-41e1-a093-6b0f7718f5cd. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 805.625786] env[62204]: DEBUG nova.network.neutron [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Updating instance_info_cache with network_info: [{"id": "557947cd-ca92-41e1-a093-6b0f7718f5cd", "address": "fa:16:3e:ef:5c:d9", "network": {"id": "1092e3d4-936b-4061-b23c-b2fe2ca949bd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-721474329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286c8643ef8c4d5ca5482bba5952ed53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7adad264-9276-43ef-9b03-07dc27d3f81e", "external-id": "nsx-vlan-transportzone-329", "segmentation_id": 329, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap557947cd-ca", "ovs_interfaceid": "557947cd-ca92-41e1-a093-6b0f7718f5cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.636597] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 805.636597] env[62204]: value = "task-1199725" [ 805.636597] env[62204]: _type = "Task" [ 805.636597] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.647148] env[62204]: DEBUG nova.compute.utils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 805.648948] env[62204]: DEBUG nova.compute.manager [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 805.649152] env[62204]: DEBUG nova.network.neutron [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 805.662136] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.734129] env[62204]: INFO nova.compute.manager [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Took 49.03 seconds to build instance. [ 805.797149] env[62204]: DEBUG nova.policy [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3476ef077def46f497cb82a8bf6f2bef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15e3c58a34dd4cf69b49d31dc0ef9244', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 805.834059] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52144d7e-fa36-a7d0-fbc2-3615558f6f78, 'name': SearchDatastore_Task, 'duration_secs': 0.02238} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.837119] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.837408] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 805.837698] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.837857] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.838192] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 
tempest-ListServersNegativeTestJSON-1469495009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 805.838626] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdf2b2f6-2c1e-4e20-af5a-011f6cd0007d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.849150] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 805.849391] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 805.856159] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7b9fb26-931b-4282-b798-1bceb3e2bb91 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.860963] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 805.860963] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52364878-59b2-3345-f31b-b4f49c066c08" [ 805.860963] env[62204]: _type = "Task" [ 805.860963] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.870119] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52364878-59b2-3345-f31b-b4f49c066c08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.880977] env[62204]: DEBUG oslo_concurrency.lockutils [req-3a45a99f-ad8c-4c5c-adf3-860ff2e43a43 req-20d20ce7-a947-4937-ab48-944138c67881 service nova] Releasing lock "refresh_cache-1121b1b8-127e-475f-8dfc-de43911de39a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.992310] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199724, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081706} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.992476] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199723, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.992778] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 805.993741] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069a7f13-9e04-4b74-bab1-15605a2094a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.022707] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 1121b1b8-127e-475f-8dfc-de43911de39a/1121b1b8-127e-475f-8dfc-de43911de39a.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 806.023876] env[62204]: DEBUG nova.network.neutron [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Updated VIF entry in instance network info cache for port a4e77bf2-081b-4fb9-9878-8c990c48fe46. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 806.024260] env[62204]: DEBUG nova.network.neutron [req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Updating instance_info_cache with network_info: [{"id": "a4e77bf2-081b-4fb9-9878-8c990c48fe46", "address": "fa:16:3e:bf:41:60", "network": {"id": "1092e3d4-936b-4061-b23c-b2fe2ca949bd", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-721474329-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286c8643ef8c4d5ca5482bba5952ed53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7adad264-9276-43ef-9b03-07dc27d3f81e", "external-id": "nsx-vlan-transportzone-329", "segmentation_id": 329, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4e77bf2-08", "ovs_interfaceid": "a4e77bf2-081b-4fb9-9878-8c990c48fe46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.028970] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-521d6e42-1fde-4af7-9c4b-a25f50b4a009 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.047023] env[62204]: DEBUG oslo_concurrency.lockutils 
[req-6afa6646-1a06-4191-b370-1af75862b9b1 req-ede8dd99-4731-45fa-b925-c37c94bd4b49 service nova] Releasing lock "refresh_cache-1a1cb81f-383e-48de-8c11-3d5e2c801f40" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.052634] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 806.052634] env[62204]: value = "task-1199726" [ 806.052634] env[62204]: _type = "Task" [ 806.052634] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.065420] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199726, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.134344] env[62204]: DEBUG oslo_concurrency.lockutils [req-011382b8-5f81-4e12-89f5-b095bffd4e8d req-659d84d2-6a81-45dd-a4b4-33ce23b9a159 service nova] Releasing lock "refresh_cache-67ee5c4d-3825-4580-a26e-74eb8da50883" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.155882] env[62204]: DEBUG nova.compute.manager [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 806.162796] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.177460] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837ddac7-7c57-4f3d-88b1-683af33b454d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.188136] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135bf78a-4b1b-431b-87b2-9ed48aabfdd7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.231610] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1abcfb-c28b-47fd-8839-57ab3a27f1f3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.240608] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7bb3193-e2e8-4ee8-b4b8-54901952c398 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2c393123-87de-460a-965d-43473478a79f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.500s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.245257] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16726d92-ec79-493c-9a99-6c08f13d36c8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.265189] env[62204]: DEBUG nova.compute.provider_tree [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.306586] env[62204]: DEBUG nova.network.neutron [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Successfully updated port: 4e81e820-357d-4b7e-900f-aaac4c7c2798 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.372593] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52364878-59b2-3345-f31b-b4f49c066c08, 'name': SearchDatastore_Task, 'duration_secs': 0.014659} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.374150] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e483c4e-da38-48ce-acd6-8bc81504a0b9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.380487] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 806.380487] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52350a16-b257-1ee9-2495-cb40732afb48" [ 806.380487] env[62204]: _type = "Task" [ 806.380487] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.389872] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52350a16-b257-1ee9-2495-cb40732afb48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.443208] env[62204]: DEBUG nova.network.neutron [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Successfully created port: e8eedf25-892e-4ebf-94d9-22b836abd37c {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.476290] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199723, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.535750] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 806.536029] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 806.536216] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 806.536399] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 806.536555] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 806.536834] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 806.537118] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 806.537310] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 806.537477] env[62204]: DEBUG nova.virt.hardware [None 
req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 806.537644] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 806.537815] env[62204]: DEBUG nova.virt.hardware [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 806.540132] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea470ed6-0cdd-4393-8ff9-960b0c98a1fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.550437] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a7eaee-270d-4de6-9600-24e6b8e9a4f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.561958] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199726, 'name': ReconfigVM_Task, 'duration_secs': 0.335115} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.572055] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 1121b1b8-127e-475f-8dfc-de43911de39a/1121b1b8-127e-475f-8dfc-de43911de39a.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 806.572940] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8503b371-0159-46e6-9762-bd9bf1bcdbb5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.579871] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 806.579871] env[62204]: value = "task-1199727" [ 806.579871] env[62204]: _type = "Task" [ 806.579871] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.588931] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199727, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.649709] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.744675] env[62204]: DEBUG nova.compute.manager [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 806.771414] env[62204]: DEBUG nova.scheduler.client.report [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 806.810059] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.810263] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.810895] env[62204]: DEBUG nova.network.neutron [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 806.894621] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52350a16-b257-1ee9-2495-cb40732afb48, 'name': SearchDatastore_Task, 'duration_secs': 0.017474} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.894960] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.895267] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 1a1cb81f-383e-48de-8c11-3d5e2c801f40/1a1cb81f-383e-48de-8c11-3d5e2c801f40.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 806.895587] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97ec9243-ac14-43f5-b13d-e8a1b3e52e25 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.907775] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 806.907775] env[62204]: value = "task-1199728" [ 806.907775] env[62204]: _type = "Task" [ 806.907775] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.923545] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199728, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.932113] env[62204]: DEBUG oslo_vmware.rw_handles [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52865c0f-98ad-439b-ac49-eab061721cca/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 806.934120] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2614fd75-cd8f-4608-9ea5-63c794604e7a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.943032] env[62204]: DEBUG oslo_vmware.rw_handles [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52865c0f-98ad-439b-ac49-eab061721cca/disk-0.vmdk is in state: ready. 
{{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 806.943231] env[62204]: ERROR oslo_vmware.rw_handles [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52865c0f-98ad-439b-ac49-eab061721cca/disk-0.vmdk due to incomplete transfer. [ 806.943842] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4cf76078-9837-4537-a91d-9d512667ff1a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.953285] env[62204]: DEBUG oslo_vmware.rw_handles [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52865c0f-98ad-439b-ac49-eab061721cca/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 806.953528] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Uploaded image 4f9ea6a5-3f0b-47f0-a5c9-c4019eca38dd to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 806.956434] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 806.957075] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-148459ba-5813-40f0-8980-20307c2b96f6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.964497] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 806.964497] env[62204]: value = "task-1199729" [ 806.964497] env[62204]: _type = "Task" [ 806.964497] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.976980] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199729, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.981019] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199723, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.090582] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199727, 'name': Rename_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.151621] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.155302] env[62204]: DEBUG oslo_concurrency.lockutils [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.155761] env[62204]: DEBUG oslo_concurrency.lockutils [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.166919] env[62204]: DEBUG nova.compute.manager [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 807.201088] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 807.201388] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 807.201555] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 807.201777] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 807.201930] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 807.202246] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 807.202588] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 807.202829] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 807.203097] env[62204]: DEBUG nova.virt.hardware [None 
req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 807.203455] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 807.203770] env[62204]: DEBUG nova.virt.hardware [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 807.205256] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a907b6ab-c82b-48c6-892a-67ff262088ae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.216018] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8045d5f3-1b73-4d50-9496-fd8a57f51725 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.271474] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.274582] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.140s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.277366] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.032s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.279285] env[62204]: INFO nova.compute.claims [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.307784] env[62204]: INFO nova.scheduler.client.report [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted allocations for instance 137ce499-6602-46b5-b1eb-b03282c2bab3 [ 807.368865] env[62204]: DEBUG nova.network.neutron [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 
tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.424577] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199728, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.479043] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199723, 'name': PowerOnVM_Task, 'duration_secs': 1.604631} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.482355] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.482635] env[62204]: INFO nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Took 10.55 seconds to spawn the instance on the hypervisor. [ 807.482847] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 807.483183] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199729, 'name': Destroy_Task} progress is 33%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.484648] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51f4b68-4e39-4cfe-a0bf-7b6070396bce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.590538] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199727, 'name': Rename_Task, 'duration_secs': 0.931563} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.590877] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 807.591181] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f81ce35-2509-48f4-b382-356e13216469 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.597123] env[62204]: DEBUG nova.network.neutron [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [{"id": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "address": "fa:16:3e:d5:25:7f", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e81e820-35", "ovs_interfaceid": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.600081] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 807.600081] env[62204]: value = "task-1199730" [ 807.600081] env[62204]: _type = "Task" [ 807.600081] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.611202] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199730, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.653031] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.659727] env[62204]: DEBUG nova.compute.utils [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 807.821456] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6463d853-34e0-4396-95b2-7b068897d4d1 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "137ce499-6602-46b5-b1eb-b03282c2bab3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.609s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.924646] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199728, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569977} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.925162] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 1a1cb81f-383e-48de-8c11-3d5e2c801f40/1a1cb81f-383e-48de-8c11-3d5e2c801f40.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 807.925638] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 807.926095] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d97dbe5f-b609-4cf6-a91f-1d97f2b84011 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.935659] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 807.935659] env[62204]: value = "task-1199731" [ 807.935659] env[62204]: _type = "Task" [ 807.935659] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.944481] env[62204]: DEBUG nova.compute.manager [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 807.953238] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8613737-f64b-42bc-a0f2-2195a5414040 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.957387] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199731, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.983022] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199729, 'name': Destroy_Task, 'duration_secs': 0.717082} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.983022] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Destroyed the VM [ 807.983022] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 807.983022] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0b2a8c59-3341-427d-90f2-cb162f75bc13 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.993028] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 807.993028] env[62204]: value = "task-1199732" [ 807.993028] env[62204]: _type = "Task" [ 807.993028] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.013848] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199732, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.014611] env[62204]: INFO nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Took 48.78 seconds to build instance. 
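The records above repeatedly show the oslo.vmware task-polling pattern: a vCenter task is started, then polled ("Task: {'id': ..., 'name': ...} progress is N%") until it reports "completed successfully". Below is a minimal, self-contained sketch of that poll-until-terminal-state loop; FakeVimSession and its get_task_info method are hypothetical stand-ins for illustration only, not the real oslo.vmware session API.

# Minimal sketch of the task-polling pattern seen in the log above.
# FakeVimSession is a hypothetical stand-in, not the oslo.vmware API.
import time


class FakeVimSession:
    """Pretend session that reports increasing progress, then success."""

    def __init__(self):
        self._progress = 0

    def get_task_info(self, task_ref):
        self._progress = min(self._progress + 50, 100)
        state = "success" if self._progress == 100 else "running"
        return {"id": task_ref, "state": state, "progress": self._progress}


def wait_for_task(session, task_ref, interval=0.5):
    """Poll a task until it reaches a terminal state, logging progress."""
    while True:
        info = session.get_task_info(task_ref)
        if info["state"] == "success":
            print(f"Task: {{'id': {task_ref!r}}} completed successfully.")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_ref} failed")
        print(f"Task: {{'id': {task_ref!r}}} progress is {info['progress']}%.")
        time.sleep(interval)


if __name__ == "__main__":
    wait_for_task(FakeVimSession(), "task-1199728")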
[ 808.104390] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.104390] env[62204]: DEBUG nova.compute.manager [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Instance network_info: |[{"id": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "address": "fa:16:3e:d5:25:7f", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e81e820-35", "ovs_interfaceid": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 808.104390] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:25:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e81e820-357d-4b7e-900f-aaac4c7c2798', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.118387] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Creating folder: Project (1ef8dc436e4b45d0a8d50468666358e3). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.126376] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59395238-7599-4983-aacf-decf4be5fa8f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.137297] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199730, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.139622] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Created folder: Project (1ef8dc436e4b45d0a8d50468666358e3) in parent group-v259933. [ 808.140057] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Creating folder: Instances. Parent ref: group-v260034. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 808.140457] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81ceeef5-ba9a-40aa-ac6d-44675a312375 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.154089] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.155777] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Created folder: Instances in parent group-v260034. [ 808.156125] env[62204]: DEBUG oslo.service.loopingcall [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.156383] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.156702] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40f54dd5-ccfa-4f00-8d31-848a080715de {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.172911] env[62204]: DEBUG oslo_concurrency.lockutils [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.017s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.179212] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 808.179212] env[62204]: value = "task-1199735" [ 808.179212] env[62204]: _type = "Task" [ 808.179212] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.188266] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199735, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.399103] env[62204]: DEBUG nova.network.neutron [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Successfully updated port: e8eedf25-892e-4ebf-94d9-22b836abd37c {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 808.455053] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199731, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.247595} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.456026] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 808.456712] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d79046-44b0-4c5c-93b5-4776739b6727 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.476410] env[62204]: INFO nova.compute.manager [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] instance snapshotting [ 808.476499] env[62204]: WARNING nova.compute.manager [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 808.486859] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 1a1cb81f-383e-48de-8c11-3d5e2c801f40/1a1cb81f-383e-48de-8c11-3d5e2c801f40.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 808.491126] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a518194-23a5-4790-b15e-fbce6b5a6441 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.505413] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e4eff7-6d8f-4e0d-baa8-6b966bd386ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.531914] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "67ee5c4d-3825-4580-a26e-74eb8da50883" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
116.474s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.544971] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d558b442-f60d-4874-9be9-465ca0664e5f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.549321] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 808.549321] env[62204]: value = "task-1199736" [ 808.549321] env[62204]: _type = "Task" [ 808.549321] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.549692] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199732, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.574930] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199736, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.620147] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199730, 'name': PowerOnVM_Task, 'duration_secs': 0.836785} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.620147] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 808.620318] env[62204]: INFO nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Took 8.96 seconds to spawn the instance on the hypervisor. 
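Many of the surrounding records come from oslo_concurrency.lockutils: "Acquiring lock ... by ...", "Lock ... acquired ... :: waited Ns", and 'Lock ... "released" ... :: held Ns'. A simplified sketch of a named-lock context manager that emits the same waited/held timings follows; it is illustrative only and not the real lockutils implementation.

# Simplified sketch of the named-lock pattern behind the
# "Acquiring lock / acquired :: waited / released :: held" records above.
# Not the real oslo_concurrency.lockutils code.
import contextlib
import threading
import time

_locks: dict[str, threading.Lock] = {}
_locks_guard = threading.Lock()


def _get_lock(name: str) -> threading.Lock:
    # Lazily create one process-wide lock object per lock name.
    with _locks_guard:
        return _locks.setdefault(name, threading.Lock())


@contextlib.contextmanager
def named_lock(name: str, caller: str):
    lock = _get_lock(name)
    print(f'Acquiring lock "{name}" by "{caller}"')
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    held_start = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_start
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


if __name__ == "__main__":
    with named_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.1)  # simulated critical section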
[ 808.620598] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 808.621858] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a855cb3-a05c-40f4-995f-54b544a8b7f7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.658894] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.693458] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199735, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.871245] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20b5b14-8165-4d4c-b252-f04e7117b618 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.877501] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04171f3c-149e-4b2a-812b-2a4073321208 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.883162] env[62204]: DEBUG nova.compute.manager [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Received event network-vif-plugged-4e81e820-357d-4b7e-900f-aaac4c7c2798 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 808.884041] env[62204]: DEBUG oslo_concurrency.lockutils [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] Acquiring lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.884041] env[62204]: DEBUG oslo_concurrency.lockutils [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.884041] env[62204]: DEBUG oslo_concurrency.lockutils [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.884041] env[62204]: DEBUG nova.compute.manager [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] No waiting events found 
dispatching network-vif-plugged-4e81e820-357d-4b7e-900f-aaac4c7c2798 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 808.884258] env[62204]: WARNING nova.compute.manager [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Received unexpected event network-vif-plugged-4e81e820-357d-4b7e-900f-aaac4c7c2798 for instance with vm_state building and task_state spawning. [ 808.884472] env[62204]: DEBUG nova.compute.manager [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Received event network-changed-4e81e820-357d-4b7e-900f-aaac4c7c2798 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 808.884472] env[62204]: DEBUG nova.compute.manager [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Refreshing instance network info cache due to event network-changed-4e81e820-357d-4b7e-900f-aaac4c7c2798. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 808.884654] env[62204]: DEBUG oslo_concurrency.lockutils [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] Acquiring lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.884790] env[62204]: DEBUG oslo_concurrency.lockutils [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] Acquired lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.884946] env[62204]: DEBUG nova.network.neutron [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Refreshing network info cache for port 4e81e820-357d-4b7e-900f-aaac4c7c2798 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 808.917954] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquiring lock "refresh_cache-60eaec9c-5dcc-4e2f-9649-78acba318a6b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.917954] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquired lock "refresh_cache-60eaec9c-5dcc-4e2f-9649-78acba318a6b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.917954] env[62204]: DEBUG nova.network.neutron [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 808.922322] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5969922-f52a-4faf-976e-28b6f96ed164 
{{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.929345] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81458c9f-1b3f-4b04-94d7-d00294f803b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.947015] env[62204]: DEBUG nova.compute.provider_tree [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.020300] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199732, 'name': RemoveSnapshot_Task} progress is 46%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.053869] env[62204]: DEBUG nova.compute.manager [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 809.064284] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199736, 'name': ReconfigVM_Task, 'duration_secs': 0.541502} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.066172] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 1a1cb81f-383e-48de-8c11-3d5e2c801f40/1a1cb81f-383e-48de-8c11-3d5e2c801f40.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 809.066172] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ace5f39c-6e3e-4568-9897-da2b917b9e7d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.071448] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 809.072015] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-237af374-8b0c-4a09-8818-db7f89f6523c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.075123] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 809.075123] env[62204]: value = "task-1199737" [ 809.075123] env[62204]: _type = "Task" [ 809.075123] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.081602] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 809.081602] env[62204]: value = "task-1199738" [ 809.081602] env[62204]: _type = "Task" [ 809.081602] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.096115] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199737, 'name': Rename_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.102784] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199738, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.155456] env[62204]: INFO nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Took 39.71 seconds to build instance. 
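Taken together, the task records for instance 1a1cb81f-383e-48de-8c11-3d5e2c801f40 trace a spawn sequence: CopyVirtualDisk_Task from the devstack image cache, ExtendVirtualDisk_Task for the root disk, ReconfigVM_Task to attach it, Rename_Task, then PowerOnVM_Task. A rough sketch of that ordering is below; the step functions are hypothetical placeholders, and the real flow lives in nova.virt.vmwareapi.vmops/vm_util and runs through vCenter tasks.

# Rough sketch of the spawn ordering visible in the log above.
# Step functions are hypothetical placeholders, not nova code.
from typing import Callable


def copy_cached_vmdk(instance: str) -> None:
    print(f"[{instance}] CopyVirtualDisk_Task from devstack-image-cache_base")


def extend_root_disk(instance: str) -> None:
    print(f"[{instance}] ExtendVirtualDisk_Task to the flavor root disk size")


def attach_root_disk(instance: str) -> None:
    print(f"[{instance}] ReconfigVM_Task attaching {instance}.vmdk")


def rename_vm(instance: str) -> None:
    print(f"[{instance}] Rename_Task")


def power_on(instance: str) -> None:
    print(f"[{instance}] PowerOnVM_Task")


SPAWN_STEPS: list[Callable[[str], None]] = [
    copy_cached_vmdk,
    extend_root_disk,
    attach_root_disk,
    rename_vm,
    power_on,
]


def spawn(instance: str) -> None:
    """Run the spawn steps in the order the task log shows."""
    for step in SPAWN_STEPS:
        step(instance)


if __name__ == "__main__":
    spawn("1a1cb81f-383e-48de-8c11-3d5e2c801f40")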
[ 809.163353] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.191932] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199735, 'name': CreateVM_Task, 'duration_secs': 0.609692} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.192226] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.194591] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.194591] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.194591] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 809.194591] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7242b6fa-c4ee-4ca5-acae-0f2b2764c132 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.202452] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 809.202452] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e63b4c-01f1-3df0-d6a0-037ea38c91d9" [ 809.202452] env[62204]: _type = "Task" [ 809.202452] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.213907] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e63b4c-01f1-3df0-d6a0-037ea38c91d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.283093] env[62204]: DEBUG oslo_concurrency.lockutils [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.283403] env[62204]: DEBUG oslo_concurrency.lockutils [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.283724] env[62204]: INFO nova.compute.manager [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Attaching volume 36c27753-d664-470d-98bd-effeeada2008 to /dev/sdb [ 809.332576] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beef3158-537d-48c5-b1b7-99ca3bcb79d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.341195] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dadb08d3-5166-4d83-902d-5b24241de1f6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.355374] env[62204]: DEBUG nova.virt.block_device [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Updating existing volume attachment record: bf643ebc-0b24-4ac0-a0e1-8aefddd9fde3 {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 809.453397] env[62204]: DEBUG nova.scheduler.client.report [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 809.517160] env[62204]: DEBUG nova.network.neutron [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 809.527403] env[62204]: DEBUG oslo_vmware.api [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199732, 'name': RemoveSnapshot_Task, 'duration_secs': 1.149439} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.528090] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 809.528356] env[62204]: INFO nova.compute.manager [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Took 17.36 seconds to snapshot the instance on the hypervisor. [ 809.588101] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199737, 'name': Rename_Task, 'duration_secs': 0.28991} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.594448] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 809.594862] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f19b2d8-f3f0-412e-b99d-f99fbb61b424 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.599252] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.603510] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199738, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.604741] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 809.604741] env[62204]: value = "task-1199740" [ 809.604741] env[62204]: _type = "Task" [ 809.604741] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.615536] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199740, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.660716] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.667372] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1121b1b8-127e-475f-8dfc-de43911de39a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.584s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.715026] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e63b4c-01f1-3df0-d6a0-037ea38c91d9, 'name': SearchDatastore_Task, 'duration_secs': 0.020425} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.715026] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.715184] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 809.715432] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.715718] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.715954] env[62204]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.718028] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9010a378-e663-437f-affe-ecf43e4adb7c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.727775] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.729098] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 809.730946] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d638032-0af5-47b6-b771-ebfc9416081c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.737016] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 809.737016] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529f72b3-9c3d-ba4e-6349-ffac9d6e5a49" [ 809.737016] env[62204]: _type = "Task" [ 809.737016] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.747645] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529f72b3-9c3d-ba4e-6349-ffac9d6e5a49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.946338] env[62204]: DEBUG nova.network.neutron [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Updating instance_info_cache with network_info: [{"id": "e8eedf25-892e-4ebf-94d9-22b836abd37c", "address": "fa:16:3e:65:1c:03", "network": {"id": "c0b91e36-4d22-40be-aefb-f7c5045915ab", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-965240895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15e3c58a34dd4cf69b49d31dc0ef9244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8eedf25-89", "ovs_interfaceid": "e8eedf25-892e-4ebf-94d9-22b836abd37c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.961223] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.962136] env[62204]: DEBUG nova.compute.manager [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 809.965969] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.021s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.967913] env[62204]: INFO nova.compute.claims [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.026181] env[62204]: DEBUG nova.network.neutron [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updated VIF entry in instance network info cache for port 4e81e820-357d-4b7e-900f-aaac4c7c2798. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 810.027020] env[62204]: DEBUG nova.network.neutron [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [{"id": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "address": "fa:16:3e:d5:25:7f", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e81e820-35", "ovs_interfaceid": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.098320] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199738, 'name': CreateSnapshot_Task, 'duration_secs': 0.942629} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.099319] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 810.099488] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15eda2c6-cfad-4744-9377-ba36ce14dd36 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.121087] env[62204]: DEBUG nova.compute.manager [None req-33913816-81aa-4853-983a-4dfe88ef0c26 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Found 2 images (rotation: 2) {{(pid=62204) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 810.127077] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199740, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.164146] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.170179] env[62204]: DEBUG nova.compute.manager [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 810.249983] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529f72b3-9c3d-ba4e-6349-ffac9d6e5a49, 'name': SearchDatastore_Task, 'duration_secs': 0.012122} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.250984] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87baac53-b0c1-4050-95f6-847cb1ebe2fe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.258256] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 810.258256] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5248b233-9109-4ebe-0c4a-2d2ec331460f" [ 810.258256] env[62204]: _type = "Task" [ 810.258256] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.268623] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5248b233-9109-4ebe-0c4a-2d2ec331460f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.448973] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Releasing lock "refresh_cache-60eaec9c-5dcc-4e2f-9649-78acba318a6b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.449907] env[62204]: DEBUG nova.compute.manager [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Instance network_info: |[{"id": "e8eedf25-892e-4ebf-94d9-22b836abd37c", "address": "fa:16:3e:65:1c:03", "network": {"id": "c0b91e36-4d22-40be-aefb-f7c5045915ab", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-965240895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15e3c58a34dd4cf69b49d31dc0ef9244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8eedf25-89", "ovs_interfaceid": "e8eedf25-892e-4ebf-94d9-22b836abd37c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 810.449907] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:1c:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16f09e8c-5240-4839-80cc-62ec29700bd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8eedf25-892e-4ebf-94d9-22b836abd37c', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 810.457996] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Creating folder: Project (15e3c58a34dd4cf69b49d31dc0ef9244). Parent ref: group-v259933. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 810.459420] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07fcac11-3b45-4dac-ab10-f06cdef8ed4c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.469183] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Created folder: Project (15e3c58a34dd4cf69b49d31dc0ef9244) in parent group-v259933. [ 810.469414] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Creating folder: Instances. Parent ref: group-v260040. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 810.469821] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26950d24-7f93-4fda-9fdb-04ca4c84b920 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.472609] env[62204]: DEBUG nova.compute.utils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 810.476177] env[62204]: DEBUG nova.compute.manager [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 810.476177] env[62204]: DEBUG nova.network.neutron [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 810.489272] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Created folder: Instances in parent group-v260040. [ 810.489699] env[62204]: DEBUG oslo.service.loopingcall [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 810.490037] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 810.490703] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff94f5a0-5d9d-4100-858f-85ef1d32fa9a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.511919] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 810.511919] env[62204]: value = "task-1199745" [ 810.511919] env[62204]: _type = "Task" [ 810.511919] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.520788] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199745, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.529603] env[62204]: DEBUG oslo_concurrency.lockutils [req-df49e6c4-6370-442f-9944-6990072ab402 req-f3e6a2ca-83f1-4127-b20c-b61854fd0828 service nova] Releasing lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.591825] env[62204]: DEBUG nova.policy [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd17709d694e840d796ba4fca7d6d08f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b28641aa01450b8ad70dc121642f79', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 810.618142] env[62204]: DEBUG oslo_vmware.api [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199740, 'name': PowerOnVM_Task, 'duration_secs': 0.673783} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.618142] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 810.618142] env[62204]: INFO nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Took 8.73 seconds to spawn the instance on the hypervisor. 
[ 810.618142] env[62204]: DEBUG nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 810.618553] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b5cedc-af77-4054-9e8f-6dde886da593 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.629593] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 810.631963] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-029fe651-6eee-444c-bb57-65b4e178f664 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.643627] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 810.643627] env[62204]: value = "task-1199746" [ 810.643627] env[62204]: _type = "Task" [ 810.643627] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.652736] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199746, 'name': CloneVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.660909] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.702197] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.771224] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5248b233-9109-4ebe-0c4a-2d2ec331460f, 'name': SearchDatastore_Task, 'duration_secs': 0.023846} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.772031] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.772031] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc/57e14d47-1d3f-4fed-93c1-11cfc17dc9bc.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 810.772478] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-657ab990-d862-4a60-b841-105312e70bf1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.784377] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 810.784377] env[62204]: value = "task-1199747" [ 810.784377] env[62204]: _type = "Task" [ 810.784377] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.799195] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.984097] env[62204]: DEBUG nova.compute.manager [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 811.033172] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199745, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.040632] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.040879] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.069323] env[62204]: DEBUG nova.network.neutron [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Successfully created port: 5af1ae4e-3a58-4d76-854a-59ac01168a4c {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 811.160997] env[62204]: INFO nova.compute.manager [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Took 41.29 seconds to build instance. [ 811.172879] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199746, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.178778] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.295919] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199747, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.481283] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d9433c-c962-487e-9a05-a970634e691d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.499722] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244d72ef-c0e9-4d9f-80c2-47060b72aeea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.544038] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1caf18bf-8ce0-4010-b95b-8368e57f5be6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.556025] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199745, 'name': CreateVM_Task, 'duration_secs': 0.542107} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.556644] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 811.557498] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.557710] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.558073] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 811.559453] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17722d9-9166-4dc1-bdb6-87c4c8f653d7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.564307] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38d31423-3e4a-4fbd-9aab-db0bf2266593 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.577959] env[62204]: DEBUG nova.compute.provider_tree [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 811.581024] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 811.581024] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52646292-d9ac-bd29-9dc3-d84c6689a43f" [ 811.581024] env[62204]: _type = "Task" [ 811.581024] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.590556] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52646292-d9ac-bd29-9dc3-d84c6689a43f, 'name': SearchDatastore_Task, 'duration_secs': 0.016233} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.591113] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.591603] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 811.591684] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.591782] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.591974] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 811.592553] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-5ab99b05-a116-44dc-bd70-e06d7a209f81 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.604065] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 811.604276] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 811.604969] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06233772-8dc3-4c23-aaa0-d3b77f781e8b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.610924] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 811.610924] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5253ee4e-84fb-34ba-ac41-93f59ccae1f1" [ 811.610924] env[62204]: _type = "Task" [ 811.610924] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.620141] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5253ee4e-84fb-34ba-ac41-93f59ccae1f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.657890] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199746, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.672228] env[62204]: DEBUG oslo_vmware.api [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199725, 'name': ReconfigVM_Task, 'duration_secs': 5.824934} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.672556] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.672818] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfigured VM to detach interface {{(pid=62204) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 811.675360] env[62204]: DEBUG oslo_concurrency.lockutils [None req-22c3998a-1d6e-4ccc-8c01-f3d31470d948 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.566s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.715905] env[62204]: DEBUG nova.compute.manager [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 811.720171] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a359c38-c420-4073-8b1a-c541a160778c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.728731] env[62204]: DEBUG nova.compute.manager [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Received event network-vif-plugged-e8eedf25-892e-4ebf-94d9-22b836abd37c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 811.729092] env[62204]: DEBUG oslo_concurrency.lockutils [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] Acquiring lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.729506] env[62204]: DEBUG oslo_concurrency.lockutils [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] Lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.729666] env[62204]: DEBUG oslo_concurrency.lockutils [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] Lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.729854] env[62204]: 
DEBUG nova.compute.manager [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] No waiting events found dispatching network-vif-plugged-e8eedf25-892e-4ebf-94d9-22b836abd37c {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 811.730050] env[62204]: WARNING nova.compute.manager [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Received unexpected event network-vif-plugged-e8eedf25-892e-4ebf-94d9-22b836abd37c for instance with vm_state building and task_state spawning. [ 811.730278] env[62204]: DEBUG nova.compute.manager [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Received event network-changed-e8eedf25-892e-4ebf-94d9-22b836abd37c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 811.730446] env[62204]: DEBUG nova.compute.manager [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Refreshing instance network info cache due to event network-changed-e8eedf25-892e-4ebf-94d9-22b836abd37c. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 811.730956] env[62204]: DEBUG oslo_concurrency.lockutils [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] Acquiring lock "refresh_cache-60eaec9c-5dcc-4e2f-9649-78acba318a6b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.731222] env[62204]: DEBUG oslo_concurrency.lockutils [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] Acquired lock "refresh_cache-60eaec9c-5dcc-4e2f-9649-78acba318a6b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.731334] env[62204]: DEBUG nova.network.neutron [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Refreshing network info cache for port e8eedf25-892e-4ebf-94d9-22b836abd37c {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 811.795785] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199747, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.751358} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.796102] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc/57e14d47-1d3f-4fed-93c1-11cfc17dc9bc.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 811.796329] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.796664] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-718861f4-ad2a-4549-b6cf-63b40a54ca0c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.803505] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 811.803505] env[62204]: value = "task-1199749" [ 811.803505] env[62204]: _type = "Task" [ 811.803505] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.812264] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199749, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.000685] env[62204]: DEBUG nova.compute.manager [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 812.035212] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 812.035548] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 812.035848] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.036114] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 812.036334] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.036516] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 812.036838] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 812.037084] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 812.037321] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 812.037515] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 812.037745] env[62204]: DEBUG nova.virt.hardware [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 812.038792] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9cc9343-b88b-49d7-a42d-e9dc8a5e77e9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.052869] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b407d126-1d44-4849-892c-3c3492dc108c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.111282] env[62204]: ERROR nova.scheduler.client.report [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [req-1e94ff9c-436e-4365-b186-67add4507bf5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92e8f362-5134-40c6-9a5c-0b8f64197972. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1e94ff9c-436e-4365-b186-67add4507bf5"}]} [ 812.128966] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5253ee4e-84fb-34ba-ac41-93f59ccae1f1, 'name': SearchDatastore_Task, 'duration_secs': 0.015512} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.129855] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b22dc62-1e4e-4d0b-8c28-de48bec1e204 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.135752] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 812.135752] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5220f83d-7c57-0a7b-74eb-51a6b2d036fe" [ 812.135752] env[62204]: _type = "Task" [ 812.135752] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.136670] env[62204]: DEBUG nova.scheduler.client.report [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Refreshing inventories for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 812.149991] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5220f83d-7c57-0a7b-74eb-51a6b2d036fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.159118] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199746, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.167655] env[62204]: DEBUG nova.scheduler.client.report [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Updating ProviderTree inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 812.167655] env[62204]: DEBUG nova.compute.provider_tree [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.169500] env[62204]: INFO nova.compute.manager [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Rebuilding instance [ 812.179018] env[62204]: DEBUG nova.compute.manager [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 812.189376] env[62204]: DEBUG nova.scheduler.client.report [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Refreshing aggregate associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, aggregates: None {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 812.206177] env[62204]: DEBUG oslo_vmware.rw_handles [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c267-baf1-7f9c-e9fe-f91438c12cf5/disk-0.vmdk. 
{{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 812.207367] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3b76b5-16c3-4baa-a322-6599fbec6cfe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.211103] env[62204]: DEBUG nova.scheduler.client.report [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Refreshing trait associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 812.219053] env[62204]: DEBUG oslo_vmware.rw_handles [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c267-baf1-7f9c-e9fe-f91438c12cf5/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 812.219268] env[62204]: ERROR oslo_vmware.rw_handles [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c267-baf1-7f9c-e9fe-f91438c12cf5/disk-0.vmdk due to incomplete transfer. [ 812.221650] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f1478ce7-84a8-4349-900a-d8ca4f4237e7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.223520] env[62204]: DEBUG nova.compute.manager [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 812.224351] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e351e08-3a83-40cd-b313-03d9f77c567a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.240598] env[62204]: DEBUG oslo_vmware.rw_handles [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5290c267-baf1-7f9c-e9fe-f91438c12cf5/disk-0.vmdk. 
{{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 812.241014] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Uploaded image c2930f97-8cc3-4e0b-b082-ac3975f12ee6 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 812.242564] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 812.244851] env[62204]: INFO nova.compute.manager [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] instance snapshotting [ 812.248427] env[62204]: DEBUG nova.objects.instance [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'flavor' on Instance uuid 0a4a432d-a71a-4da7-be90-25dcec5a64c6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 812.248427] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f19247e3-6fc7-4e64-a2fb-c201d3ad33bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.254321] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 812.254321] env[62204]: value = "task-1199750" [ 812.254321] env[62204]: _type = "Task" [ 812.254321] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.269362] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199750, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.317085] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089105} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.317424] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 812.318563] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9769a516-79ce-4d16-b16e-72a382051653 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.344885] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc/57e14d47-1d3f-4fed-93c1-11cfc17dc9bc.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.344885] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b2cf4ce-ec83-4383-80ed-1ee519467e99 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.369670] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 812.369670] env[62204]: value = "task-1199751" [ 812.369670] env[62204]: _type = "Task" [ 812.369670] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.381315] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199751, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.648123] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5220f83d-7c57-0a7b-74eb-51a6b2d036fe, 'name': SearchDatastore_Task, 'duration_secs': 0.017212} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.648432] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.648698] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 60eaec9c-5dcc-4e2f-9649-78acba318a6b/60eaec9c-5dcc-4e2f-9649-78acba318a6b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 812.648966] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6a94c30-59cf-4755-9894-6369c5734700 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.659512] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199746, 'name': CloneVM_Task, 'duration_secs': 1.724261} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.660884] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Created linked-clone VM from snapshot [ 812.661250] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 812.661250] env[62204]: value = "task-1199752" [ 812.661250] env[62204]: _type = "Task" [ 812.661250] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.664926] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3fb81a-e55b-4705-a67d-614f27b8773b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.678510] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Uploading image 4801d6e4-f18a-4d32-b037-0f23254b78b2 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 812.687352] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199752, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.704834] env[62204]: DEBUG oslo_vmware.rw_handles [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 812.704834] env[62204]: value = "vm-260043" [ 812.704834] env[62204]: _type = "VirtualMachine" [ 812.704834] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 812.705131] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-336e0ecf-9726-4842-b419-be68f8efcb7a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.707669] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.710637] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554a7434-5991-4daf-8c18-2cdb19d17226 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.713772] env[62204]: DEBUG oslo_vmware.rw_handles [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lease: (returnval){ [ 812.713772] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529dacf8-ce23-25fa-f337-b06f3520377d" [ 812.713772] env[62204]: _type = "HttpNfcLease" [ 812.713772] env[62204]: } obtained for exporting VM: (result){ [ 812.713772] env[62204]: value = "vm-260043" [ 812.713772] env[62204]: _type = "VirtualMachine" [ 812.713772] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 812.714048] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the lease: (returnval){ [ 812.714048] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529dacf8-ce23-25fa-f337-b06f3520377d" [ 812.714048] env[62204]: _type = "HttpNfcLease" [ 812.714048] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 812.719990] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0dca9a-4a0d-468b-a6d9-9158739cceb1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.725530] env[62204]: DEBUG nova.network.neutron [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Updated VIF entry in instance network info cache for port e8eedf25-892e-4ebf-94d9-22b836abd37c. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 812.725920] env[62204]: DEBUG nova.network.neutron [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Updating instance_info_cache with network_info: [{"id": "e8eedf25-892e-4ebf-94d9-22b836abd37c", "address": "fa:16:3e:65:1c:03", "network": {"id": "c0b91e36-4d22-40be-aefb-f7c5045915ab", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-965240895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15e3c58a34dd4cf69b49d31dc0ef9244", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16f09e8c-5240-4839-80cc-62ec29700bd2", "external-id": "nsx-vlan-transportzone-720", "segmentation_id": 720, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8eedf25-89", "ovs_interfaceid": "e8eedf25-892e-4ebf-94d9-22b836abd37c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.729997] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 812.729997] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529dacf8-ce23-25fa-f337-b06f3520377d" [ 812.729997] env[62204]: _type = "HttpNfcLease" [ 812.729997] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 812.754766] env[62204]: DEBUG oslo_vmware.rw_handles [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 812.754766] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529dacf8-ce23-25fa-f337-b06f3520377d" [ 812.754766] env[62204]: _type = "HttpNfcLease" [ 812.754766] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 812.759233] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.760410] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4166adbd-8206-4eab-97ae-f6f72b3c5b4f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.763370] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-baaf6be7-d3cd-4d5b-beec-f29f4a017f7c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.765416] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d54d5f-decf-4de7-8de7-536a19d4b473 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.772264] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04a6638-03d0-45a7-8f48-da15a8239924 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.785689] env[62204]: DEBUG oslo_vmware.rw_handles [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281d8f4-9b48-b76e-5d3e-cd8a5ea2d8c8/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 812.786217] env[62204]: DEBUG oslo_vmware.rw_handles [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281d8f4-9b48-b76e-5d3e-cd8a5ea2d8c8/disk-0.vmdk for reading. {{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 812.793919] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5434355-5b1e-42ea-9232-b41306bbfcbd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.798144] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 812.798144] env[62204]: value = "task-1199754" [ 812.798144] env[62204]: _type = "Task" [ 812.798144] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.798431] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199750, 'name': Destroy_Task, 'duration_secs': 0.444201} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.800180] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Destroyed the VM [ 812.800563] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 812.863230] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f0cbc43f-2845-4e55-8fbb-8b309eac0be6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.890860] env[62204]: DEBUG nova.compute.provider_tree [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.899319] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91867e7-f33f-4b1b-8e12-66ee3f4be439 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.908017] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199754, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.908411] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 812.908411] env[62204]: value = "task-1199755" [ 812.908411] env[62204]: _type = "Task" [ 812.908411] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.917602] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199751, 'name': ReconfigVM_Task, 'duration_secs': 0.444474} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.920787] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc/57e14d47-1d3f-4fed-93c1-11cfc17dc9bc.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.921979] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e7ea04b-b6a5-42ca-be8f-1d0e615ac0a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.927320] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199755, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.933412] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 812.933412] env[62204]: value = "task-1199756" [ 812.933412] env[62204]: _type = "Task" [ 812.933412] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.943440] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199756, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.960087] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-be1b5fa1-9f41-4d52-a16e-1ea2c6e0f9b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.091436] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.091806] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.091964] env[62204]: DEBUG nova.network.neutron [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 813.168029] env[62204]: DEBUG nova.network.neutron [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Successfully updated port: 5af1ae4e-3a58-4d76-854a-59ac01168a4c {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 813.185519] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199752, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.216727] env[62204]: DEBUG nova.compute.manager [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-vif-deleted-1eef631c-fa58-4ad5-862e-c25b847eaa2c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 813.217028] env[62204]: INFO nova.compute.manager [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Neutron deleted interface 1eef631c-fa58-4ad5-862e-c25b847eaa2c; detaching it from the instance and deleting it from the info cache [ 813.217383] env[62204]: DEBUG nova.network.neutron [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "address": "fa:16:3e:36:c2:b3", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33e67759-b1", "ovs_interfaceid": "33e67759-b1fb-4395-9ed1-bf2102c8d3ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 813.255189] env[62204]: DEBUG oslo_concurrency.lockutils [req-0a96a282-a7af-4db7-9370-a29803aa9ec4 req-0f46dc14-5f7c-44f1-9e3d-98887a7a32d3 service nova] Releasing lock "refresh_cache-60eaec9c-5dcc-4e2f-9649-78acba318a6b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.313772] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199754, 'name': PowerOffVM_Task, 'duration_secs': 0.287302} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.314575] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.314878] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 813.315946] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3aac58-0e30-41d5-95a5-a12c0886f8aa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.324768] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 813.325229] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-80856ffd-3975-4020-a90a-bb4086f9d580 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.404042] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 813.404042] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 813.404260] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleting the datastore file [datastore2] 2c393123-87de-460a-965d-43473478a79f {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 813.404561] env[62204]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78afb192-2a4a-42b4-82e7-1b7b28c6cc62 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.412055] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 813.412055] env[62204]: value = "task-1199758" [ 813.412055] env[62204]: _type = "Task" [ 813.412055] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.427657] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 813.428253] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199755, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.428676] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199758, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.428991] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-505f9ce4-2aa9-44c8-8d19-64a696a0292a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.431931] env[62204]: DEBUG nova.scheduler.client.report [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Updated inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 813.432250] env[62204]: DEBUG nova.compute.provider_tree [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Updating resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 generation from 87 to 88 during operation: update_inventory {{(pid=62204) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 813.432506] env[62204]: DEBUG nova.compute.provider_tree [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Updating inventory in 
ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 813.441261] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 813.441261] env[62204]: value = "task-1199759" [ 813.441261] env[62204]: _type = "Task" [ 813.441261] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.449228] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199756, 'name': Rename_Task, 'duration_secs': 0.387391} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.449228] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 813.449428] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fde8f10-81d7-4bfe-abd4-77d359c2317e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.456986] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199759, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.463142] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 813.463142] env[62204]: value = "task-1199760" [ 813.463142] env[62204]: _type = "Task" [ 813.463142] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.472387] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199760, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.678786] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.678786] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.678786] env[62204]: DEBUG nova.network.neutron [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 813.687216] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199752, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573289} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.687216] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 60eaec9c-5dcc-4e2f-9649-78acba318a6b/60eaec9c-5dcc-4e2f-9649-78acba318a6b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 813.687216] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 813.687216] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b3fcdc9-3276-45bc-a7fd-f688474a3e50 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.697026] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 813.697026] env[62204]: value = "task-1199761" [ 813.697026] env[62204]: _type = "Task" [ 813.697026] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.707674] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199761, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.720622] env[62204]: DEBUG oslo_concurrency.lockutils [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.720990] env[62204]: DEBUG oslo_concurrency.lockutils [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Acquired lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.721942] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf22eb33-d1fb-4def-8674-6ee87facfe9c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.747332] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab7002d-3691-4a1b-aa1b-7429cc101063 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.773270] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.781652] env[62204]: DEBUG nova.virt.vmwareapi.vmops [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfiguring VM to detach interface {{(pid=62204) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 813.782606] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae3dcdef-05a2-4bd0-ada1-887d71f2da69 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.803782] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Waiting for the task: (returnval){ [ 813.803782] env[62204]: value = "task-1199762" [ 813.803782] env[62204]: _type = "Task" [ 813.803782] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.813985] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.922454] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199755, 'name': RemoveSnapshot_Task, 'duration_secs': 0.742092} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.923154] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 813.923788] env[62204]: DEBUG nova.compute.manager [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 813.924380] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf14ddd-45ea-4c93-828a-b8d2496130e6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.927831] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Volume attach. Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 813.928083] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260038', 'volume_id': '36c27753-d664-470d-98bd-effeeada2008', 'name': 'volume-36c27753-d664-470d-98bd-effeeada2008', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4793e9fd-be87-4885-8f0e-1fcef6ce4d2f', 'attached_at': '', 'detached_at': '', 'volume_id': '36c27753-d664-470d-98bd-effeeada2008', 'serial': '36c27753-d664-470d-98bd-effeeada2008'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 813.932226] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5750725a-2332-4225-947d-56678c4e019c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.935145] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199758, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.363628} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.935915] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 813.936189] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 813.936469] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 813.942352] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.976s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.942844] env[62204]: DEBUG nova.compute.manager [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 813.959810] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.149s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.960151] env[62204]: DEBUG nova.objects.instance [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lazy-loading 'resources' on Instance uuid d6370e37-6f73-4334-8057-a30aa2c39682 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 813.965516] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecac92e-0027-4c35-91a0-65488f61ecd1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.977517] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199759, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.999065] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199760, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.008366] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] volume-36c27753-d664-470d-98bd-effeeada2008/volume-36c27753-d664-470d-98bd-effeeada2008.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 814.011563] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82c5a68e-f6be-43d9-a3b5-b5e72cb9968f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.035121] env[62204]: DEBUG oslo_vmware.api [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 814.035121] env[62204]: value = "task-1199763" [ 814.035121] env[62204]: _type = "Task" [ 814.035121] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.043755] env[62204]: DEBUG oslo_vmware.api [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199763, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.208032] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199761, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.140174} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.208032] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 814.208032] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974f180b-de52-466a-b0cd-63358d1dae40 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.231582] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 60eaec9c-5dcc-4e2f-9649-78acba318a6b/60eaec9c-5dcc-4e2f-9649-78acba318a6b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 814.232098] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ebd8a67-4263-46a1-b60b-5897dd84a865 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.249332] env[62204]: DEBUG nova.network.neutron [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.261530] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 814.261530] env[62204]: value = "task-1199764" [ 814.261530] env[62204]: _type = "Task" [ 814.261530] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.277501] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199764, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.315210] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.355100] env[62204]: INFO nova.network.neutron [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Port 33e67759-b1fb-4395-9ed1-bf2102c8d3ee from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 814.355363] env[62204]: INFO nova.network.neutron [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Port 1eef631c-fa58-4ad5-862e-c25b847eaa2c from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 814.355801] env[62204]: DEBUG nova.network.neutron [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.378077] env[62204]: DEBUG nova.compute.manager [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-vif-deleted-33e67759-b1fb-4395-9ed1-bf2102c8d3ee {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 814.378277] env[62204]: INFO nova.compute.manager [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Neutron deleted interface 33e67759-b1fb-4395-9ed1-bf2102c8d3ee; detaching it from the instance and deleting it from the info cache [ 814.378550] env[62204]: DEBUG nova.network.neutron [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [{"id": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "address": "fa:16:3e:d3:54:e8", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81069ae-b3", "ovs_interfaceid": "c81069ae-b3b9-4b0d-902f-ed9a2e24542f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.463489] env[62204]: DEBUG nova.compute.utils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 814.474291] env[62204]: DEBUG nova.compute.manager [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 814.475404] env[62204]: DEBUG nova.network.neutron [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 814.482032] env[62204]: INFO nova.compute.manager [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Shelve offloading [ 814.487449] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.488639] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e35abae0-a1a8-4b36-886f-5e47630e8671 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.496668] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199759, 'name': CreateSnapshot_Task, 'duration_secs': 0.964393} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.499822] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 814.500245] env[62204]: DEBUG oslo_vmware.api [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199760, 'name': PowerOnVM_Task, 'duration_secs': 0.763916} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.502797] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a211a9ba-241f-4d26-9b4e-32d2e97ab4ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.505908] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 814.506623] env[62204]: INFO nova.compute.manager [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Took 9.99 seconds to spawn the instance on the hypervisor. [ 814.506623] env[62204]: DEBUG nova.compute.manager [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 814.506821] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 814.506821] env[62204]: value = "task-1199765" [ 814.506821] env[62204]: _type = "Task" [ 814.506821] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.508675] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3ef480-f463-4e45-9d04-33ac45a80e7c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.546491] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 814.546734] env[62204]: DEBUG nova.compute.manager [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 814.550552] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea306fe-b332-420d-a42c-c768fd94e3cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.561024] env[62204]: DEBUG oslo_vmware.api [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199763, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.563262] env[62204]: DEBUG oslo_concurrency.lockutils [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.563262] env[62204]: DEBUG oslo_concurrency.lockutils [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.563262] env[62204]: DEBUG nova.network.neutron [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 814.575476] env[62204]: DEBUG nova.network.neutron [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Updating instance_info_cache with network_info: [{"id": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "address": "fa:16:3e:32:08:82", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af1ae4e-3a", "ovs_interfaceid": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.585533] env[62204]: DEBUG nova.policy [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd17709d694e840d796ba4fca7d6d08f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b28641aa01450b8ad70dc121642f79', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 814.756728] env[62204]: DEBUG oslo_concurrency.lockutils [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "67ee5c4d-3825-4580-a26e-74eb8da50883" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.757098] env[62204]: DEBUG oslo_concurrency.lockutils [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "67ee5c4d-3825-4580-a26e-74eb8da50883" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.757287] env[62204]: DEBUG oslo_concurrency.lockutils [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "67ee5c4d-3825-4580-a26e-74eb8da50883-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.757512] env[62204]: DEBUG oslo_concurrency.lockutils [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "67ee5c4d-3825-4580-a26e-74eb8da50883-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
814.757949] env[62204]: DEBUG oslo_concurrency.lockutils [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "67ee5c4d-3825-4580-a26e-74eb8da50883-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.760138] env[62204]: INFO nova.compute.manager [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Terminating instance [ 814.765414] env[62204]: DEBUG nova.compute.manager [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 814.765726] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 814.766680] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558c4b21-12fd-4307-a76b-db86235c86aa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.778429] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199764, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.779894] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.780908] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-810eeeac-38a4-48b7-b9d4-deb54d52e0be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.787394] env[62204]: DEBUG oslo_vmware.api [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 814.787394] env[62204]: value = "task-1199766" [ 814.787394] env[62204]: _type = "Task" [ 814.787394] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.799494] env[62204]: DEBUG oslo_vmware.api [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199766, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.817040] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.862051] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.882540] env[62204]: DEBUG oslo_concurrency.lockutils [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.936564] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2efa7b5e-6fc7-42a6-84e9-9b1303a87eb1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.946900] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cf4785-1e5b-497a-844f-a42e2b42d9b8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.992329] env[62204]: DEBUG nova.compute.manager [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 815.000220] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb36e28-bf28-4ca1-b5bd-9ab000c26b30 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.009730] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ae6e3b-4c9b-47e8-b59c-a9c81763067c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.026720] env[62204]: DEBUG nova.compute.provider_tree [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 815.036180] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 815.036456] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 815.036625] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 815.036814] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 815.036962] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 
tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 815.037130] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 815.037345] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 815.037507] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 815.037698] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 815.038047] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 815.038141] env[62204]: DEBUG nova.virt.hardware [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 815.039630] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79620f01-d434-4aa1-9fb9-df67d6f43e03 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.049044] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 815.056596] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9030bb6a-efb9-4816-9c8a-5d2e303c3684 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.065296] env[62204]: INFO nova.compute.manager [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Took 42.81 seconds to build instance. 
[ 815.068740] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc891af6-0bf0-41dd-a288-c78218c11bab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.079735] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 815.079735] env[62204]: value = "task-1199767" [ 815.079735] env[62204]: _type = "Task" [ 815.079735] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.080162] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.080582] env[62204]: DEBUG nova.compute.manager [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Instance network_info: |[{"id": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "address": "fa:16:3e:32:08:82", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af1ae4e-3a", "ovs_interfaceid": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 815.080754] env[62204]: DEBUG oslo_vmware.api [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199763, 'name': ReconfigVM_Task, 'duration_secs': 0.719904} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.082145] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:08:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5af1ae4e-3a58-4d76-854a-59ac01168a4c', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 815.090285] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Creating folder: Project (43b28641aa01450b8ad70dc121642f79). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 815.090636] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Reconfigured VM instance instance-00000036 to attach disk [datastore2] volume-36c27753-d664-470d-98bd-effeeada2008/volume-36c27753-d664-470d-98bd-effeeada2008.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 815.107286] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13ba5f85-5d0e-46ce-8c9f-13c2326312ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.108913] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7ebe5ce-3fbf-4098-9a44-c265b1945a71 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.120320] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:e6:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a6c2acb-ed90-490d-b4c8-9ab8037a80d2', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 815.127246] env[62204]: DEBUG oslo.service.loopingcall [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.128341] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c393123-87de-460a-965d-43473478a79f] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 815.129767] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44d36831-df12-48c3-b9bf-39a961d9c54d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.157370] env[62204]: DEBUG oslo_vmware.api [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 815.157370] env[62204]: value = "task-1199769" [ 815.157370] env[62204]: _type = "Task" [ 815.157370] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.157747] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Created folder: Project (43b28641aa01450b8ad70dc121642f79) in parent group-v259933. [ 815.157970] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Creating folder: Instances. Parent ref: group-v260046. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 815.158291] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 23%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.159308] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-127aacd8-12ef-45f1-bdd8-8303dec8ae76 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.166983] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 815.166983] env[62204]: value = "task-1199770" [ 815.166983] env[62204]: _type = "Task" [ 815.166983] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.174743] env[62204]: DEBUG oslo_vmware.api [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199769, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.180067] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Created folder: Instances in parent group-v260046. [ 815.180067] env[62204]: DEBUG oslo.service.loopingcall [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.180067] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 815.180067] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a1fee49-d341-4e9f-bb99-f7187511e8e5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.200514] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199770, 'name': CreateVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.202718] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 815.202718] env[62204]: value = "task-1199772" [ 815.202718] env[62204]: _type = "Task" [ 815.202718] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.211948] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199772, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.282028] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199764, 'name': ReconfigVM_Task, 'duration_secs': 0.538133} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.282634] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 60eaec9c-5dcc-4e2f-9649-78acba318a6b/60eaec9c-5dcc-4e2f-9649-78acba318a6b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 815.284752] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74b8b38d-dff7-47e4-85aa-3b79e7152372 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.297914] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 815.297914] env[62204]: value = "task-1199773" [ 815.297914] env[62204]: _type = "Task" [ 815.297914] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.306598] env[62204]: DEBUG oslo_vmware.api [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199766, 'name': PowerOffVM_Task, 'duration_secs': 0.188208} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.307509] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 815.307698] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 815.311284] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38f70492-7a5c-46b0-86b0-6f56150ebdd9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.316944] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199773, 'name': Rename_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.323450] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.356842] env[62204]: DEBUG nova.network.neutron [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Successfully created port: 06dfadf2-c796-4fd2-a53f-55cb955837a9 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.367525] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7f82216-0450-446b-87af-c772cc90256c tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-a71fd192-f3b6-4f0f-900d-887d15f44d7a-33e67759-b1fb-4395-9ed1-bf2102c8d3ee" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.319s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.394442] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 815.394700] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 815.394927] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Deleting the datastore file [datastore1] 67ee5c4d-3825-4580-a26e-74eb8da50883 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 815.398366] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ecfb6a5-86dd-44a5-bcd2-3f027ea143f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.405520] env[62204]: DEBUG oslo_vmware.api [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 815.405520] env[62204]: value = "task-1199775" [ 815.405520] env[62204]: _type = "Task" [ 815.405520] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.416893] env[62204]: DEBUG oslo_vmware.api [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199775, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.569598] env[62204]: DEBUG nova.scheduler.client.report [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Updated inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with generation 88 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 815.569852] env[62204]: DEBUG nova.compute.provider_tree [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Updating resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 generation from 88 to 89 during operation: update_inventory {{(pid=62204) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 815.570020] env[62204]: DEBUG nova.compute.provider_tree [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 815.577924] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1194308f-e8f7-4b9e-8220-3af96f7ae248 tempest-ServerActionsTestJSON-1799023414 
tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.963s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.593495] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.670134] env[62204]: DEBUG oslo_vmware.api [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199769, 'name': ReconfigVM_Task, 'duration_secs': 0.202512} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.674110] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260038', 'volume_id': '36c27753-d664-470d-98bd-effeeada2008', 'name': 'volume-36c27753-d664-470d-98bd-effeeada2008', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4793e9fd-be87-4885-8f0e-1fcef6ce4d2f', 'attached_at': '', 'detached_at': '', 'volume_id': '36c27753-d664-470d-98bd-effeeada2008', 'serial': '36c27753-d664-470d-98bd-effeeada2008'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 815.680963] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199770, 'name': CreateVM_Task, 'duration_secs': 0.448938} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.681268] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c393123-87de-460a-965d-43473478a79f] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 815.681954] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.682205] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.682521] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 815.682913] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02119601-811b-41ba-8a78-b72b8e10a464 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.688247] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 815.688247] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dacb9a-1075-a645-8cf7-2d8ec4cbfda8" [ 815.688247] env[62204]: _type = "Task" [ 815.688247] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.699125] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dacb9a-1075-a645-8cf7-2d8ec4cbfda8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.713822] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199772, 'name': CreateVM_Task, 'duration_secs': 0.418452} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.713996] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 815.714675] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.807875] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199773, 'name': Rename_Task, 'duration_secs': 0.247152} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.811814] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 815.812829] env[62204]: DEBUG nova.network.neutron [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating instance_info_cache with network_info: [{"id": "52d592a0-434a-4f17-8db6-39bf5d505429", "address": "fa:16:3e:98:f8:77", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d592a0-43", "ovs_interfaceid": "52d592a0-434a-4f17-8db6-39bf5d505429", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.814016] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a58b103b-a85d-40fd-8557-dd6878107166 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.824108] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service 
nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.826280] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 815.826280] env[62204]: value = "task-1199776" [ 815.826280] env[62204]: _type = "Task" [ 815.826280] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.839299] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199776, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.919047] env[62204]: DEBUG oslo_vmware.api [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199775, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291342} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.919450] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 815.919732] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 815.920015] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 815.920304] env[62204]: INFO nova.compute.manager [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Took 1.15 seconds to destroy the instance on the hypervisor. [ 815.920659] env[62204]: DEBUG oslo.service.loopingcall [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 815.920937] env[62204]: DEBUG nova.compute.manager [-] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 815.921090] env[62204]: DEBUG nova.network.neutron [-] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 816.007924] env[62204]: DEBUG nova.compute.manager [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 816.040498] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 816.040812] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 816.043886] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.044159] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 816.044261] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.044424] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 816.044773] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 816.044962] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 816.045166] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 816.045388] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 816.045617] env[62204]: DEBUG nova.virt.hardware [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 816.046589] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f340ed6e-eaee-4833-a60d-3132fc8037e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.057176] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f791e6-618d-49d0-be19-6f3401f6a494 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.088282] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.125s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.092554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.984s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.096026] env[62204]: INFO nova.compute.claims [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 
25563dec-7e4d-42d9-b922-0b2354b5d70e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.097917] env[62204]: DEBUG nova.compute.manager [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 816.116352] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.127085] env[62204]: INFO nova.scheduler.client.report [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Deleted allocations for instance d6370e37-6f73-4334-8057-a30aa2c39682 [ 816.201378] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dacb9a-1075-a645-8cf7-2d8ec4cbfda8, 'name': SearchDatastore_Task, 'duration_secs': 0.026106} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.201505] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.201780] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.202292] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.202292] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.202528] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.202999] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.203383] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 816.203639] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e3f7a51-ec09-402f-b1f0-3919d4aae2a7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.205925] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57652217-ff6c-4aa7-99ce-9e1a16d31db1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.212220] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 816.212220] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520b7fe7-7174-5fe5-2f42-44918684cf41" [ 816.212220] env[62204]: _type = "Task" [ 816.212220] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.216839] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.218366] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 816.218566] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a60f4faf-3793-4cea-a1a3-1ebfbc94d393 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.225653] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520b7fe7-7174-5fe5-2f42-44918684cf41, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.230227] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 816.230227] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d5cddd-ce18-51dd-d35c-a9274572e8e2" [ 816.230227] env[62204]: _type = "Task" [ 816.230227] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.238078] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d5cddd-ce18-51dd-d35c-a9274572e8e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.324124] env[62204]: DEBUG oslo_concurrency.lockutils [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.325853] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.341309] env[62204]: DEBUG oslo_vmware.api [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199776, 'name': PowerOnVM_Task, 'duration_secs': 0.513072} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.341309] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 816.341309] env[62204]: INFO nova.compute.manager [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Took 9.17 seconds to spawn the instance on the hypervisor. 
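The records above show the driver issuing vCenter tasks (Folder.CreateVM_Task, VirtualMachine.Rename_Task, VirtualMachine.PowerOnVM_Task) through oslo.vmware and then blocking on them, which is what produces the "Waiting for the task" and "progress is N%" lines (wait_for_task at oslo_vmware/api.py:397, _poll_task at :434). A minimal sketch of that invoke-then-wait pattern, assuming the public oslo_vmware.api.VMwareAPISession interface; the host, credentials and managed-object references are hypothetical placeholders, not values from this log:

    # Sketch only: illustrates the invoke-then-poll pattern visible in the log
    # (Folder.CreateVM_Task followed by wait_for_task). Connection parameters
    # and managed-object references are hypothetical.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',           # hypothetical vCenter + credentials
        api_retry_count=10, task_poll_interval=0.5)   # poll interval drives the "progress is N%" records

    def create_vm(session, folder_ref, config_spec, respool_ref, host_ref=None):
        """Invoke CreateVM_Task and block until it finishes, like vm_util.create_vm."""
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=respool_ref, host=host_ref)
        task_info = session.wait_for_task(task)   # polls the task, as _poll_task does above
        return task_info.result                   # ManagedObjectReference of the new VM

The same wrapper is what the Rename_Task and PowerOnVM_Task completions above go through; only the method name and arguments passed to invoke_api change.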
[ 816.341309] env[62204]: DEBUG nova.compute.manager [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 816.341309] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8115179-ea5b-46b5-890b-d29fb1586b6e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.597117] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 95%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.634706] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.635517] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e2cbf83-5e25-4de2-8a22-28c86bef4433 tempest-ServerPasswordTestJSON-1312728835 tempest-ServerPasswordTestJSON-1312728835-project-member] Lock "d6370e37-6f73-4334-8057-a30aa2c39682" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.277s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.725043] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520b7fe7-7174-5fe5-2f42-44918684cf41, 'name': SearchDatastore_Task, 'duration_secs': 0.019332} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.725371] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.725651] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.726924] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.727083] env[62204]: DEBUG nova.objects.instance [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 816.739639] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d5cddd-ce18-51dd-d35c-a9274572e8e2, 'name': SearchDatastore_Task, 'duration_secs': 0.023815} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.741078] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b57a4aee-223a-4bb3-a2cb-a4cb15af9eb7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.747774] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 816.747774] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526cfa95-79c1-cad2-626a-ec219063c21a" [ 816.747774] env[62204]: _type = "Task" [ 816.747774] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.756224] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526cfa95-79c1-cad2-626a-ec219063c21a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.772752] env[62204]: DEBUG nova.compute.manager [req-a1eb1b97-300e-4a88-b2f8-79588ebf2e03 req-5989e475-0822-4829-ae33-0b13566517ee service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Received event network-vif-deleted-557947cd-ca92-41e1-a093-6b0f7718f5cd {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 816.772752] env[62204]: INFO nova.compute.manager [req-a1eb1b97-300e-4a88-b2f8-79588ebf2e03 req-5989e475-0822-4829-ae33-0b13566517ee service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Neutron deleted interface 557947cd-ca92-41e1-a093-6b0f7718f5cd; detaching it from the instance and deleting it from the info cache [ 816.772752] env[62204]: DEBUG nova.network.neutron [req-a1eb1b97-300e-4a88-b2f8-79588ebf2e03 req-5989e475-0822-4829-ae33-0b13566517ee service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.818705] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.858556] env[62204]: INFO nova.compute.manager [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Took 35.63 seconds to build instance. [ 816.916088] env[62204]: DEBUG nova.network.neutron [-] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.001055] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 817.001055] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01ec8a2-79e7-4fb6-b220-cfe600b62a30 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.010986] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 817.011343] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28cda1d7-faf2-4dfd-b68f-113208f4a596 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.103387] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.236286] env[62204]: DEBUG oslo_concurrency.lockutils [None req-05ae7773-c919-4090-b1b5-c2ab340afc7c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.952s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.261179] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526cfa95-79c1-cad2-626a-ec219063c21a, 'name': SearchDatastore_Task, 'duration_secs': 0.022701} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.261320] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.261596] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2c393123-87de-460a-965d-43473478a79f/2c393123-87de-460a-965d-43473478a79f.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 817.262035] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.262296] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 817.262505] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cdc76b47-6d97-47c4-9403-8b125ec34e55 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.268947] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-853b23ab-57c0-42a2-aa49-912190a53737 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.275824] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting 
for the task: (returnval){ [ 817.275824] env[62204]: value = "task-1199778" [ 817.275824] env[62204]: _type = "Task" [ 817.275824] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.277040] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f4e4177-65bf-4615-9f5a-d0e4f39e2993 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.279198] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 817.279393] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 817.286443] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d735ec0e-3881-4398-bbab-751767b3d93d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.300730] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199778, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.303724] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 817.303724] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b1d0b8-def9-2c62-6144-4e7ace842cc6" [ 817.303724] env[62204]: _type = "Task" [ 817.303724] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.307526] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac74ca7c-8edf-4040-934e-78e7de083daf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.340025] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b1d0b8-def9-2c62-6144-4e7ace842cc6, 'name': SearchDatastore_Task, 'duration_secs': 0.022935} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.342414] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.354429] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51e2db8d-7541-41f8-b6b3-c8e56ceab47a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.357127] env[62204]: DEBUG nova.compute.manager [req-a1eb1b97-300e-4a88-b2f8-79588ebf2e03 req-5989e475-0822-4829-ae33-0b13566517ee service nova] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Detach interface failed, port_id=557947cd-ca92-41e1-a093-6b0f7718f5cd, reason: Instance 67ee5c4d-3825-4580-a26e-74eb8da50883 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 817.361043] env[62204]: DEBUG oslo_concurrency.lockutils [None req-156bdf60-b1cf-4a3a-a0d2-8d94a9036e34 tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.821s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.364131] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 817.364131] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a624ca-5bcf-2ae6-fa61-9a27907351df" [ 817.364131] env[62204]: _type = "Task" [ 817.364131] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.373093] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a624ca-5bcf-2ae6-fa61-9a27907351df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.420412] env[62204]: INFO nova.compute.manager [-] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Took 1.50 seconds to deallocate network for instance. 
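Throughout this section oslo.concurrency emits the three-line pattern "Acquiring lock ... by ...", "Lock ... acquired ... waited Ns" and 'Lock ... "released" ... held Ns' (lockutils.py:402/407/421), for example the per-instance build lock held for 107.821s and the "compute_resources" claim lock above. A minimal sketch of the lockutils usage that produces those messages; the lock names and guarded work below are hypothetical stand-ins, not the exact Nova call sites:

    # Sketch only: lock names and the guarded work are placeholders. oslo.concurrency
    # logs the acquire/waited/held-released trio around critical sections like these.
    from oslo_concurrency import lockutils

    def claim_resources(tracker):
        # In-process fair lock, matching the "compute_resources" acquire/release pairs.
        with lockutils.lock('compute_resources'):
            tracker.instance_claim()   # hypothetical call standing in for the real work

    @lockutils.synchronized('<instance-uuid>-events')
    def pop_instance_event():
        # Decorator form; serializes per-instance event handling as in the
        # "...-events" lock records that follow.
        pass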
[ 817.581649] env[62204]: DEBUG nova.compute.manager [req-8f26c693-22f2-4369-ae5f-12fff79afb43 req-5da4d95e-dae6-4e23-aa89-dc2a8325ae6e service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received event network-vif-unplugged-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 817.581875] env[62204]: DEBUG oslo_concurrency.lockutils [req-8f26c693-22f2-4369-ae5f-12fff79afb43 req-5da4d95e-dae6-4e23-aa89-dc2a8325ae6e service nova] Acquiring lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.585462] env[62204]: DEBUG oslo_concurrency.lockutils [req-8f26c693-22f2-4369-ae5f-12fff79afb43 req-5da4d95e-dae6-4e23-aa89-dc2a8325ae6e service nova] Lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.585776] env[62204]: DEBUG oslo_concurrency.lockutils [req-8f26c693-22f2-4369-ae5f-12fff79afb43 req-5da4d95e-dae6-4e23-aa89-dc2a8325ae6e service nova] Lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.004s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.585980] env[62204]: DEBUG nova.compute.manager [req-8f26c693-22f2-4369-ae5f-12fff79afb43 req-5da4d95e-dae6-4e23-aa89-dc2a8325ae6e service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] No waiting events found dispatching network-vif-unplugged-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 817.586187] env[62204]: WARNING nova.compute.manager [req-8f26c693-22f2-4369-ae5f-12fff79afb43 req-5da4d95e-dae6-4e23-aa89-dc2a8325ae6e service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received unexpected event network-vif-unplugged-52d592a0-434a-4f17-8db6-39bf5d505429 for instance with vm_state shelved and task_state shelving_offloading. [ 817.600450] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.651202] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14da3ba3-8693-49c7-99af-63908c2bcec8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.661517] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523eea21-76cd-47f4-b3f0-4ffcb249c1a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.707715] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd785bca-557b-4a00-bfbb-4dfc7b975e2b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.720274] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defccfce-fea8-49bb-8e47-ce6dfded0c57 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.743080] env[62204]: DEBUG nova.compute.provider_tree [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.792503] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199778, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.838293] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.863718] env[62204]: DEBUG nova.compute.manager [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 817.880550] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a624ca-5bcf-2ae6-fa61-9a27907351df, 'name': SearchDatastore_Task, 'duration_secs': 0.019825} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.880913] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.881130] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] f5f0c15f-ae0d-4615-93ab-3203a5d7e090/f5f0c15f-ae0d-4615-93ab-3203a5d7e090.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 817.882602] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0cb4f779-6b18-49b0-9dd0-7608fc49b259 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.892856] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 817.892856] env[62204]: value = "task-1199779" [ 817.892856] env[62204]: _type = "Task" [ 817.892856] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.905596] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199779, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.924387] env[62204]: DEBUG nova.network.neutron [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Successfully updated port: 06dfadf2-c796-4fd2-a53f-55cb955837a9 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 817.927907] env[62204]: DEBUG oslo_concurrency.lockutils [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.100320] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 95%. 
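
Annotation: each 'Waiting for the task: (returnval){ value = "task-..." }' block followed by repeated 'Task: {...} progress is N%' entries comes from oslo.vmware's session layer issuing a vSphere task and polling it. A minimal sketch of that call-and-wait pattern, assuming an already-created VMwareAPISession; the CopyVirtualDisk arguments mirror the log entry above and are illustrative:

def copy_and_wait(session, src_path, dst_path, datacenter):
    # Issue VirtualDiskManager.CopyVirtualDisk_Task against vCenter.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src_path, sourceDatacenter=datacenter,
                              destName=dst_path, destDatacenter=datacenter)
    # wait_for_task() polls the task object (producing the "progress is N%"
    # DEBUG lines) and returns its result, or raises, once it finishes.
    return session.wait_for_task(task)
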
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.208397] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquiring lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.208719] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.247846] env[62204]: DEBUG nova.scheduler.client.report [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 818.292034] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199778, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731663} completed successfully. 
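
Annotation: the inventory dict reported for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 translates into schedulable capacity as (total - reserved) * allocation_ratio, with any single request further capped by max_unit. A quick check with the values from the log entry above:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 156},
}
for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, usable)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
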
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.292267] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2c393123-87de-460a-965d-43473478a79f/2c393123-87de-460a-965d-43473478a79f.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 818.292499] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 818.292743] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-106965e5-3802-4281-a44e-9568332cac92 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.300469] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 818.300469] env[62204]: value = "task-1199780" [ 818.300469] env[62204]: _type = "Task" [ 818.300469] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.309930] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199780, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.336349] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.403077] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.406248] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199779, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.430348] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "refresh_cache-6dc170a4-b08e-44b5-a152-832670e6866b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.430567] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "refresh_cache-6dc170a4-b08e-44b5-a152-832670e6866b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.430724] env[62204]: DEBUG nova.network.neutron [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 818.570793] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 818.571468] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.571468] env[62204]: DEBUG nova.compute.manager [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 818.572785] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4fe92c-1e9c-4781-9b96-2b02525e9197 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.584109] env[62204]: DEBUG nova.compute.manager [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62204) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 818.584109] env[62204]: DEBUG nova.objects.instance [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f {{(pid=62204) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 818.606087] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 95%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.753542] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.754246] env[62204]: DEBUG nova.compute.manager [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 818.759037] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.048s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.814560] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199780, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.192671} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.821475] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 818.822575] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4688cb95-648b-4dd8-8cc5-adb04e7a2bf2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.869343] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 2c393123-87de-460a-965d-43473478a79f/2c393123-87de-460a-965d-43473478a79f.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.878497] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f3ed000-6e40-44c5-82b1-eab2986bf971 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.904255] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.910294] env[62204]: DEBUG nova.compute.manager [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Received event network-changed-4e81e820-357d-4b7e-900f-aaac4c7c2798 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 818.910294] env[62204]: DEBUG nova.compute.manager [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Refreshing instance network info cache due to event network-changed-4e81e820-357d-4b7e-900f-aaac4c7c2798. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 818.910294] env[62204]: DEBUG oslo_concurrency.lockutils [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] Acquiring lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 818.910294] env[62204]: DEBUG oslo_concurrency.lockutils [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] Acquired lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.910638] env[62204]: DEBUG nova.network.neutron [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Refreshing network info cache for port 4e81e820-357d-4b7e-900f-aaac4c7c2798 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 818.925785] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 818.925785] env[62204]: value = "task-1199781" [ 818.925785] env[62204]: _type = "Task" [ 818.925785] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.940481] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.840978} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.943445] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] f5f0c15f-ae0d-4615-93ab-3203a5d7e090/f5f0c15f-ae0d-4615-93ab-3203a5d7e090.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 818.943977] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 818.944447] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 818.944752] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 818.945065] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleting the datastore file [datastore2] 2727dc46-98ed-435d-89ef-41bc20cda776 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 818.950850] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4b37e52-875d-4dda-a4b6-b795b601a36c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.954115] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5905c195-65d0-4d95-afa5-d5f61bd03187 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.957050] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199781, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.968310] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 818.968310] env[62204]: value = "task-1199782" [ 818.968310] env[62204]: _type = "Task" [ 818.968310] env[62204]: } to complete. 
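
Annotation: the shelve-offload teardown above ("Unregistered the VM" → "Deleting the datastore file" → DeleteDatastoreFile_Task) is the usual unregister-then-delete sequence against vCenter. A minimal sketch of those two calls, assuming an existing oslo.vmware session and illustrative arguments:

def destroy_backing(session, vm_ref, ds_path, datacenter):
    # UnregisterVM is a plain (non-task) call: the VM leaves the vCenter
    # inventory but its files remain on the datastore ...
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # ... so the instance directory is removed explicitly afterwards.
    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_mgr,
                              name=ds_path, datacenter=datacenter)
    session.wait_for_task(task)
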
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.970637] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 818.970637] env[62204]: value = "task-1199783" [ 818.970637] env[62204]: _type = "Task" [ 818.970637] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.985069] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199782, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.992926] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199783, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.010492] env[62204]: DEBUG nova.network.neutron [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 819.093552] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.098504] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31369c8f-c55a-416c-88fd-49e8031d634a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.108739] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.113645] env[62204]: DEBUG oslo_vmware.api [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 819.113645] env[62204]: value = "task-1199784" [ 819.113645] env[62204]: _type = "Task" [ 819.113645] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.126699] env[62204]: DEBUG oslo_vmware.api [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199784, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.261902] env[62204]: DEBUG nova.compute.utils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 819.263757] env[62204]: DEBUG nova.compute.manager [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 819.263953] env[62204]: DEBUG nova.network.neutron [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 819.290137] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ee7d94-2b84-491e-a448-1e1aa3ca0387 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.299197] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c51f69-8e31-4199-b5bb-7fd852f8e59e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.331369] env[62204]: DEBUG nova.policy [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '53cab7fd384749deb5c7f6e3faa03b1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '286b300e98e244eb8693bb0f3174c121', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 819.333487] env[62204]: DEBUG nova.network.neutron [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Updating instance_info_cache with network_info: [{"id": "06dfadf2-c796-4fd2-a53f-55cb955837a9", "address": "fa:16:3e:1d:bf:ce", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06dfadf2-c7", "ovs_interfaceid": "06dfadf2-c796-4fd2-a53f-55cb955837a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.338159] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3695af-4a2a-47fc-b953-79c4a6fe2cce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.352970] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.354956] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d5880d-f53a-45df-86e4-9f752166d770 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.372568] env[62204]: DEBUG nova.compute.provider_tree [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 819.436083] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199781, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.489035] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.202644} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.492695] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 819.493432] env[62204]: DEBUG oslo_vmware.api [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1199783, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.277365} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.496301] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7950fc-3ab9-4722-9211-482e6f6debce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.502019] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 819.502019] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 819.502019] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 819.529430] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] f5f0c15f-ae0d-4615-93ab-3203a5d7e090/f5f0c15f-ae0d-4615-93ab-3203a5d7e090.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 819.529430] env[62204]: INFO nova.scheduler.client.report [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted allocations for instance 2727dc46-98ed-435d-89ef-41bc20cda776 [ 819.533749] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0140ebae-fd27-418a-b43d-426c040e8ba7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.556427] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: 
(returnval){ [ 819.556427] env[62204]: value = "task-1199785" [ 819.556427] env[62204]: _type = "Task" [ 819.556427] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.567422] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199785, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.603421] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.624045] env[62204]: DEBUG oslo_vmware.api [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199784, 'name': PowerOffVM_Task, 'duration_secs': 0.378383} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.624402] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 819.624662] env[62204]: DEBUG nova.compute.manager [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 819.625561] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b064351f-9b38-4d85-bc8b-8764cb92b7f7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.629036] env[62204]: DEBUG nova.network.neutron [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Successfully created port: 21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.726288] env[62204]: DEBUG nova.compute.manager [req-f7db1cc5-2699-416a-8d1a-8d76430b0f31 req-03df1e86-119f-481d-87b6-4d669377e6b2 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received event network-changed-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 819.726432] env[62204]: DEBUG nova.compute.manager [req-f7db1cc5-2699-416a-8d1a-8d76430b0f31 req-03df1e86-119f-481d-87b6-4d669377e6b2 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Refreshing instance network info cache due to event network-changed-52d592a0-434a-4f17-8db6-39bf5d505429. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 819.726643] env[62204]: DEBUG oslo_concurrency.lockutils [req-f7db1cc5-2699-416a-8d1a-8d76430b0f31 req-03df1e86-119f-481d-87b6-4d669377e6b2 service nova] Acquiring lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.726851] env[62204]: DEBUG oslo_concurrency.lockutils [req-f7db1cc5-2699-416a-8d1a-8d76430b0f31 req-03df1e86-119f-481d-87b6-4d669377e6b2 service nova] Acquired lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.727184] env[62204]: DEBUG nova.network.neutron [req-f7db1cc5-2699-416a-8d1a-8d76430b0f31 req-03df1e86-119f-481d-87b6-4d669377e6b2 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Refreshing network info cache for port 52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 819.767353] env[62204]: DEBUG nova.compute.manager [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 819.840497] env[62204]: DEBUG oslo_vmware.api [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Task: {'id': task-1199762, 'name': ReconfigVM_Task, 'duration_secs': 5.793064} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.840743] env[62204]: DEBUG oslo_concurrency.lockutils [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] Releasing lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.841040] env[62204]: DEBUG nova.virt.vmwareapi.vmops [req-c234f656-4a58-436f-aedf-736e4a5479b4 req-ba3f6c3f-f9cc-41c6-a374-b42042e2213f service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Reconfigured VM to detach interface {{(pid=62204) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 819.841613] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.069s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.842104] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.842401] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 
tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.842638] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.844650] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "refresh_cache-6dc170a4-b08e-44b5-a152-832670e6866b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.845222] env[62204]: DEBUG nova.compute.manager [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Instance network_info: |[{"id": "06dfadf2-c796-4fd2-a53f-55cb955837a9", "address": "fa:16:3e:1d:bf:ce", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06dfadf2-c7", "ovs_interfaceid": "06dfadf2-c796-4fd2-a53f-55cb955837a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 819.845808] env[62204]: INFO nova.compute.manager [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Terminating instance [ 819.847392] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:bf:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'06dfadf2-c796-4fd2-a53f-55cb955837a9', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.855346] env[62204]: DEBUG oslo.service.loopingcall [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 819.856100] env[62204]: DEBUG nova.compute.manager [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 819.856418] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.856777] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 819.857609] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b0c912-ee02-46ab-aec1-edd178a20ed8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.860781] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78938f61-39ea-4b23-b947-7f90d151ff9c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.876924] env[62204]: DEBUG nova.network.neutron [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updated VIF entry in instance network info cache for port 4e81e820-357d-4b7e-900f-aaac4c7c2798. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 819.877448] env[62204]: DEBUG nova.network.neutron [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [{"id": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "address": "fa:16:3e:d5:25:7f", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e81e820-35", "ovs_interfaceid": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.886185] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.887601] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-184dc3d4-ccbe-4cbb-b5e8-37ba76408141 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.889506] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.889506] env[62204]: value = "task-1199786" [ 819.889506] env[62204]: _type = "Task" [ 819.889506] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.897193] env[62204]: DEBUG oslo_vmware.api [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 819.897193] env[62204]: value = "task-1199787" [ 819.897193] env[62204]: _type = "Task" [ 819.897193] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.905552] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199786, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.906917] env[62204]: ERROR nova.scheduler.client.report [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [req-976cbfbd-9180-4518-ac8b-db466a428fd9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92e8f362-5134-40c6-9a5c-0b8f64197972. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-976cbfbd-9180-4518-ac8b-db466a428fd9"}]}: nova.exception.BuildAbortException: Build of instance 69604167-6a61-4723-bf7d-7ba168837839 aborted: Failed to rebuild volume backed instance. [ 819.913261] env[62204]: DEBUG oslo_vmware.api [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199787, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.941079] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199781, 'name': ReconfigVM_Task, 'duration_secs': 0.616847} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.942093] env[62204]: DEBUG nova.scheduler.client.report [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Refreshing inventories for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 819.944622] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 2c393123-87de-460a-965d-43473478a79f/2c393123-87de-460a-965d-43473478a79f.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.946843] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be7a201c-dc16-41e3-b2ec-34cb4ba9cd41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.953188] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 819.953188] env[62204]: value = "task-1199788" [ 819.953188] env[62204]: _type = "Task" [ 819.953188] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.964868] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199788, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.971583] env[62204]: DEBUG nova.scheduler.client.report [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Updating ProviderTree inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 819.972562] env[62204]: DEBUG nova.compute.provider_tree [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 819.992053] env[62204]: DEBUG nova.scheduler.client.report [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Refreshing aggregate associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, aggregates: None {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 820.023657] env[62204]: DEBUG nova.scheduler.client.report [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Refreshing trait associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 820.052154] env[62204]: DEBUG oslo_concurrency.lockutils [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.071713] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 
tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199785, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.105505] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.141819] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fa0af766-7109-466a-ad7b-9bbae85e0b8b tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.570s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.383325] env[62204]: DEBUG oslo_concurrency.lockutils [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] Releasing lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.383707] env[62204]: DEBUG nova.compute.manager [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Received event network-vif-plugged-06dfadf2-c796-4fd2-a53f-55cb955837a9 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.383939] env[62204]: DEBUG oslo_concurrency.lockutils [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] Acquiring lock "6dc170a4-b08e-44b5-a152-832670e6866b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.384173] env[62204]: DEBUG oslo_concurrency.lockutils [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] Lock "6dc170a4-b08e-44b5-a152-832670e6866b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.384343] env[62204]: DEBUG oslo_concurrency.lockutils [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] Lock "6dc170a4-b08e-44b5-a152-832670e6866b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.384515] env[62204]: DEBUG nova.compute.manager [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] No waiting events found dispatching network-vif-plugged-06dfadf2-c796-4fd2-a53f-55cb955837a9 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 820.384686] env[62204]: WARNING nova.compute.manager [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Received unexpected event 
network-vif-plugged-06dfadf2-c796-4fd2-a53f-55cb955837a9 for instance with vm_state building and task_state spawning. [ 820.384892] env[62204]: DEBUG nova.compute.manager [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Received event network-changed-06dfadf2-c796-4fd2-a53f-55cb955837a9 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.385075] env[62204]: DEBUG nova.compute.manager [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Refreshing instance network info cache due to event network-changed-06dfadf2-c796-4fd2-a53f-55cb955837a9. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 820.385282] env[62204]: DEBUG oslo_concurrency.lockutils [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] Acquiring lock "refresh_cache-6dc170a4-b08e-44b5-a152-832670e6866b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.385422] env[62204]: DEBUG oslo_concurrency.lockutils [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] Acquired lock "refresh_cache-6dc170a4-b08e-44b5-a152-832670e6866b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.385582] env[62204]: DEBUG nova.network.neutron [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Refreshing network info cache for port 06dfadf2-c796-4fd2-a53f-55cb955837a9 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 820.405253] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199786, 'name': CreateVM_Task, 'duration_secs': 0.427448} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.407931] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 820.409053] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.409234] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.409594] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 820.412649] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2faa54c3-4cc6-46cb-9b24-62d6f028e9b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.414912] env[62204]: DEBUG oslo_vmware.api [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199787, 'name': PowerOffVM_Task, 'duration_secs': 0.296398} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.415191] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 820.415365] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 820.415966] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91c45606-d68b-43f2-80a7-8720825623a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.420788] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 820.420788] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525ab18a-1cdf-32f6-d7b1-a4c7b1efd877" [ 820.420788] env[62204]: _type = "Task" [ 820.420788] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.429295] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525ab18a-1cdf-32f6-d7b1-a4c7b1efd877, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.462763] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199788, 'name': Rename_Task, 'duration_secs': 0.235969} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.465311] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 820.465799] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdfa5e83-a57e-4d24-bf28-8441e4c92ede {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.472517] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 820.472517] env[62204]: value = "task-1199790" [ 820.472517] env[62204]: _type = "Task" [ 820.472517] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.478133] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80879b8-6d12-4bf7-b591-c84b6c613bd3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.483901] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199790, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.489229] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f842d9-91c1-49e5-88bf-07a23ea12d03 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.495311] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 820.495544] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 820.496524] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleting the datastore file [datastore2] a71fd192-f3b6-4f0f-900d-887d15f44d7a {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 820.496524] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef06793b-65e9-482d-b3e0-e894913d6a7f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.528647] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47eaea2c-f59b-4c98-a816-470800265233 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.536056] env[62204]: DEBUG oslo_vmware.api [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 820.536056] env[62204]: value = "task-1199791" [ 820.536056] env[62204]: _type = "Task" [ 820.536056] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.543215] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5182df04-a342-4d10-a060-a966a3e28062 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.554029] env[62204]: DEBUG oslo_vmware.api [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199791, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.571333] env[62204]: DEBUG nova.compute.provider_tree [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 820.584922] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199785, 'name': ReconfigVM_Task, 'duration_secs': 0.574411} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.586245] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Reconfigured VM instance instance-00000044 to attach disk [datastore2] f5f0c15f-ae0d-4615-93ab-3203a5d7e090/f5f0c15f-ae0d-4615-93ab-3203a5d7e090.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 820.587517] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23074785-82ae-4a40-9413-68bf155d42ad {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.601665] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 820.601665] env[62204]: value = "task-1199792" [ 820.601665] env[62204]: _type = "Task" [ 820.601665] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.612940] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199767, 'name': CloneVM_Task, 'duration_secs': 5.30382} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.615555] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Created linked-clone VM from snapshot [ 820.616787] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035d0e1b-1377-434c-9a6c-65d7d142df3d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.626192] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199792, 'name': Rename_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.633427] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Uploading image aae8f3fb-ae2c-4ce4-a446-fb8637ad83c9 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 820.667270] env[62204]: DEBUG oslo_vmware.rw_handles [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 820.667270] env[62204]: value = "vm-260045" [ 820.667270] env[62204]: _type = "VirtualMachine" [ 820.667270] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 820.667477] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-41a0e363-ca16-4158-8e28-9239f2e5532d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.674546] env[62204]: DEBUG oslo_vmware.rw_handles [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease: (returnval){ [ 820.674546] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5230b9c2-e8d6-ac88-a7fb-8067a6348af9" [ 820.674546] env[62204]: _type = "HttpNfcLease" [ 820.674546] env[62204]: } obtained for exporting VM: (result){ [ 820.674546] env[62204]: value = "vm-260045" [ 820.674546] env[62204]: _type = "VirtualMachine" [ 820.674546] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 820.675073] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the lease: (returnval){ [ 820.675073] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5230b9c2-e8d6-ac88-a7fb-8067a6348af9" [ 820.675073] env[62204]: _type = "HttpNfcLease" [ 820.675073] env[62204]: } to be ready. 
{{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 820.679350] env[62204]: DEBUG nova.network.neutron [req-f7db1cc5-2699-416a-8d1a-8d76430b0f31 req-03df1e86-119f-481d-87b6-4d669377e6b2 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updated VIF entry in instance network info cache for port 52d592a0-434a-4f17-8db6-39bf5d505429. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 820.679746] env[62204]: DEBUG nova.network.neutron [req-f7db1cc5-2699-416a-8d1a-8d76430b0f31 req-03df1e86-119f-481d-87b6-4d669377e6b2 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating instance_info_cache with network_info: [{"id": "52d592a0-434a-4f17-8db6-39bf5d505429", "address": "fa:16:3e:98:f8:77", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap52d592a0-43", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.682533] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 820.682533] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5230b9c2-e8d6-ac88-a7fb-8067a6348af9" [ 820.682533] env[62204]: _type = "HttpNfcLease" [ 820.682533] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 820.741600] env[62204]: DEBUG nova.objects.instance [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 820.780595] env[62204]: DEBUG nova.compute.manager [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 820.807100] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 820.807370] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 820.807533] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.807708] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 820.807859] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.808227] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 820.808523] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 820.808692] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 820.808868] env[62204]: DEBUG nova.virt.hardware [None 
req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 820.809057] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 820.809244] env[62204]: DEBUG nova.virt.hardware [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 820.810459] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020baf38-bf6f-4547-a07c-2078f2d8a514 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.818846] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de15994-b1b2-4081-90ee-44bdb132e418 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.936275] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525ab18a-1cdf-32f6-d7b1-a4c7b1efd877, 'name': SearchDatastore_Task, 'duration_secs': 0.029152} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.937451] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 820.937777] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.938278] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.938630] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.938768] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.939759] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61e0512e-e151-49e4-b191-2baa59f248ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.956236] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.956485] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 820.957510] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e7ccfe2-9fb6-48eb-a2f2-58d97ed1c2c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.964624] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 820.964624] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526ec4be-3dd0-36d2-98cf-c2ca03b4ff32" [ 820.964624] env[62204]: _type = "Task" [ 820.964624] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.974475] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526ec4be-3dd0-36d2-98cf-c2ca03b4ff32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.984085] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199790, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.047629] env[62204]: DEBUG oslo_vmware.api [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1199791, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.50561} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.047964] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 821.048222] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 821.048471] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 821.048712] env[62204]: INFO nova.compute.manager [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Took 1.19 seconds to destroy the instance on the hypervisor. 
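The Acquiring/acquired/"released" lock lines that recur throughout this trace (the per-image-cache lock and external semaphore earlier in this block, the "compute_resources" lock further down) are produced by oslo.concurrency's lockutils helpers. A minimal sketch of that usage pattern follows; it is not Nova's actual code, the function names are invented, and only the lock names are modeled on the log.

```python
# Minimal sketch of the oslo.concurrency locking pattern behind the
# Acquiring/acquired/released DEBUG lines in this log. Not Nova's code;
# lock names mirror the log, the functions are invented for illustration.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage_example():
    # Serialized section: only one thread in this process may touch the
    # resource accounting at a time (compare the
    # 'Lock "compute_resources" acquired by ...' lines).
    pass

def fetch_cached_image_example(datastore, image_id):
    # Ad-hoc named lock, analogous to the per-image cache lock
    # '[datastore2] devstack-image-cache_base/<image-id>' seen above.
    name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
    with lockutils.lock(name):
        pass  # check or populate the cached VMDK while holding the lock
```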
[ 821.049053] env[62204]: DEBUG oslo.service.loopingcall [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 821.049298] env[62204]: DEBUG nova.compute.manager [-] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 821.049419] env[62204]: DEBUG nova.network.neutron [-] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 821.116046] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199792, 'name': Rename_Task, 'duration_secs': 0.20743} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.117240] env[62204]: DEBUG nova.scheduler.client.report [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Updated inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 821.117494] env[62204]: DEBUG nova.compute.provider_tree [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Updating resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 generation from 90 to 91 during operation: update_inventory {{(pid=62204) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 821.117688] env[62204]: DEBUG nova.compute.provider_tree [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 821.121283] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powering on the VM {{(pid=62204) 
power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 821.121766] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0461bfe6-8a6c-4211-a898-e337ea4ab22b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.131635] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 821.131635] env[62204]: value = "task-1199794" [ 821.131635] env[62204]: _type = "Task" [ 821.131635] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.144559] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199794, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.183724] env[62204]: DEBUG oslo_concurrency.lockutils [req-f7db1cc5-2699-416a-8d1a-8d76430b0f31 req-03df1e86-119f-481d-87b6-4d669377e6b2 service nova] Releasing lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.187027] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 821.187027] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5230b9c2-e8d6-ac88-a7fb-8067a6348af9" [ 821.187027] env[62204]: _type = "HttpNfcLease" [ 821.187027] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 821.187502] env[62204]: DEBUG oslo_vmware.rw_handles [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 821.187502] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5230b9c2-e8d6-ac88-a7fb-8067a6348af9" [ 821.187502] env[62204]: _type = "HttpNfcLease" [ 821.187502] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 821.188546] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bef0ed-04db-4ad9-85f4-1947c9b21391 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.200037] env[62204]: DEBUG oslo_vmware.rw_handles [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5273c912-b0b0-d145-1549-d4b260be6fe9/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 821.200349] env[62204]: DEBUG oslo_vmware.rw_handles [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5273c912-b0b0-d145-1549-d4b260be6fe9/disk-0.vmdk for reading. 
{{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 821.266307] env[62204]: DEBUG oslo_concurrency.lockutils [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.266544] env[62204]: DEBUG oslo_concurrency.lockutils [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.266856] env[62204]: DEBUG nova.network.neutron [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 821.268021] env[62204]: DEBUG nova.objects.instance [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'info_cache' on Instance uuid 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.272601] env[62204]: DEBUG nova.network.neutron [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Updated VIF entry in instance network info cache for port 06dfadf2-c796-4fd2-a53f-55cb955837a9. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 821.272601] env[62204]: DEBUG nova.network.neutron [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Updating instance_info_cache with network_info: [{"id": "06dfadf2-c796-4fd2-a53f-55cb955837a9", "address": "fa:16:3e:1d:bf:ce", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06dfadf2-c7", "ovs_interfaceid": "06dfadf2-c796-4fd2-a53f-55cb955837a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.352856] env[62204]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 1eef631c-fa58-4ad5-862e-c25b847eaa2c could not be found.", "detail": ""}} {{(pid=62204) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 821.353264] env[62204]: DEBUG nova.network.neutron [-] Unable to show port 1eef631c-fa58-4ad5-862e-c25b847eaa2c as it no longer exists. {{(pid=62204) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 821.359862] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6e52bc1f-3f60-444c-87ea-bff394e892bc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.476262] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526ec4be-3dd0-36d2-98cf-c2ca03b4ff32, 'name': SearchDatastore_Task, 'duration_secs': 0.016498} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.480066] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d96beb20-758d-484f-80ae-64b3e6e8f7fc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.489351] env[62204]: DEBUG oslo_vmware.api [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199790, 'name': PowerOnVM_Task, 'duration_secs': 0.669613} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.489729] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 821.489729] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52188106-e260-440c-2f62-2d4316bff153" [ 821.489729] env[62204]: _type = "Task" [ 821.489729] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.490109] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.490377] env[62204]: DEBUG nova.compute.manager [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 821.491280] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0190e148-40a0-427e-8d5e-3eeb70464bc3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.505319] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52188106-e260-440c-2f62-2d4316bff153, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.625040] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.866s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.625040] env[62204]: INFO nova.compute.manager [None req-f41aceb1-b61d-4be1-9511-7d8e1fb3efba tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Successfully reverted task state from rebuilding on failure for instance. 
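The req-f41aceb1 thread has just worked through Placement's optimistic concurrency control: its first inventory PUT failed with 409 placement.concurrent_update, the client re-read the provider (picking up generation 90), and the retried write succeeded, bumping the generation to 91. A rough sketch of that read-modify-write loop against the Placement API is below; the `session` object (assumed to be an already-authenticated HTTP session that knows the Placement endpoint and microversion headers), the retry bound, and the back-off are assumptions, not values from this log.

```python
# Rough sketch of the Placement generation-guarded update loop visible above
# (409 placement.concurrent_update, refresh, retry). `session` is an assumed
# requests-style session already pointed/authenticated at Placement; the retry
# bound and back-off are illustrative, not Nova's exact policy.
import time

def set_inventory(session, rp_uuid, inventories, max_attempts=4):
    url = '/resource_providers/%s/inventories' % rp_uuid
    for attempt in range(1, max_attempts + 1):
        # Read the provider's current generation along with its inventory.
        current = session.get(url).json()
        payload = {
            'resource_provider_generation':
                current['resource_provider_generation'],
            'inventories': inventories,
        }
        # Write back, guarded by that generation.
        resp = session.put(url, json=payload)
        if resp.status_code == 200:
            return resp.json()  # includes the provider's new generation
        if resp.status_code == 409 and 'placement.concurrent_update' in resp.text:
            # Another writer bumped the generation first: refresh and retry.
            time.sleep(0.1 * attempt)
            continue
        resp.raise_for_status()
    raise RuntimeError('inventory update kept conflicting after %d attempts'
                       % max_attempts)
```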
[ 821.631627] env[62204]: DEBUG oslo_concurrency.lockutils [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.054s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.631627] env[62204]: DEBUG nova.objects.instance [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lazy-loading 'resources' on Instance uuid cce823b9-6a03-4902-9794-2b93f99eef94 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 821.645503] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199794, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.691451] env[62204]: DEBUG nova.compute.manager [req-ffeb4263-17de-47b5-9baf-f733dd919fcd req-688cf35c-1209-43ab-bd1d-ab4c4dedcbc1 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Received event network-vif-plugged-21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 821.693087] env[62204]: DEBUG oslo_concurrency.lockutils [req-ffeb4263-17de-47b5-9baf-f733dd919fcd req-688cf35c-1209-43ab-bd1d-ab4c4dedcbc1 service nova] Acquiring lock "25563dec-7e4d-42d9-b922-0b2354b5d70e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.693087] env[62204]: DEBUG oslo_concurrency.lockutils [req-ffeb4263-17de-47b5-9baf-f733dd919fcd req-688cf35c-1209-43ab-bd1d-ab4c4dedcbc1 service nova] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.693087] env[62204]: DEBUG oslo_concurrency.lockutils [req-ffeb4263-17de-47b5-9baf-f733dd919fcd req-688cf35c-1209-43ab-bd1d-ab4c4dedcbc1 service nova] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.693087] env[62204]: DEBUG nova.compute.manager [req-ffeb4263-17de-47b5-9baf-f733dd919fcd req-688cf35c-1209-43ab-bd1d-ab4c4dedcbc1 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] No waiting events found dispatching network-vif-plugged-21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 821.693087] env[62204]: WARNING nova.compute.manager [req-ffeb4263-17de-47b5-9baf-f733dd919fcd req-688cf35c-1209-43ab-bd1d-ab4c4dedcbc1 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Received unexpected event network-vif-plugged-21c10daf-76af-4fd9-8681-58fdf9ea566f for instance with vm_state building and task_state spawning. 
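The req-ffeb4263 handler above shows the external-instance-event plumbing at work: a Neutron network-vif-plugged notification is popped against any registered waiter, and because the instance is still spawning with no waiter registered, it is logged as an unexpected event. A simplified, hypothetical latch illustrating that dispatch pattern is sketched below; the class and method names are invented and this is not Nova's implementation.

```python
# Hypothetical illustration of the event dispatch seen above: the spawn path
# registers which events it expects (e.g. network-vif-plugged-<port>), and
# externally delivered events either complete a waiting latch or are logged
# as "unexpected". Invented names; not Nova's code.
import threading
from collections import defaultdict

class InstanceEventLatch:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: Event}

    def expect(self, instance_uuid, event_name):
        # Called before the action that triggers the event (e.g. plugging a VIF).
        with self._lock:
            return self._waiters[instance_uuid].setdefault(
                event_name, threading.Event())

    def deliver(self, instance_uuid, event_name):
        # Called by the API-side event receiver when Neutron reports the event.
        with self._lock:
            ev = self._waiters[instance_uuid].pop(event_name, None)
        if ev is None:
            print('WARNING: received unexpected event %s for %s'
                  % (event_name, instance_uuid))
            return False
        ev.set()
        return True

# Usage: expect() is called before the port is wired, then the caller waits on
# the returned Event with a timeout while Neutron plugs the VIF.
```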
[ 821.771834] env[62204]: DEBUG nova.objects.base [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Object Instance<4793e9fd-be87-4885-8f0e-1fcef6ce4d2f> lazy-loaded attributes: flavor,info_cache {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 821.778630] env[62204]: DEBUG oslo_concurrency.lockutils [req-33c17803-f46a-4206-8d51-f776171725f2 req-0bce58a8-ad12-4e51-9979-63db7f889725 service nova] Releasing lock "refresh_cache-6dc170a4-b08e-44b5-a152-832670e6866b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.852427] env[62204]: DEBUG nova.network.neutron [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Successfully updated port: 21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 822.006354] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52188106-e260-440c-2f62-2d4316bff153, 'name': SearchDatastore_Task, 'duration_secs': 0.01768} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.006837] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.007320] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 6dc170a4-b08e-44b5-a152-832670e6866b/6dc170a4-b08e-44b5-a152-832670e6866b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.008293] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca54ae4c-dce6-4ea1-8aca-c8f977d5e117 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.020266] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 822.020266] env[62204]: value = "task-1199795" [ 822.020266] env[62204]: _type = "Task" [ 822.020266] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.020474] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 822.032535] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199795, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.149921] env[62204]: DEBUG oslo_vmware.api [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199794, 'name': PowerOnVM_Task, 'duration_secs': 0.707816} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.150234] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 822.150436] env[62204]: INFO nova.compute.manager [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Took 10.15 seconds to spawn the instance on the hypervisor. 
[ 822.150644] env[62204]: DEBUG nova.compute.manager [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 822.152458] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a91cb3-48c1-42ea-bcc6-c171032c0042 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.357726] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.357895] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquired lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.358218] env[62204]: DEBUG nova.network.neutron [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 822.543438] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199795, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.597174] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b279568-1441-43af-b9f9-7d3ae35823b9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.609196] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f0503c-597d-4477-bf43-7da7b40424f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.615916] env[62204]: DEBUG nova.network.neutron [-] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.622884] env[62204]: DEBUG nova.network.neutron [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Updating instance_info_cache with network_info: [{"id": "ac345dde-4672-4c9d-a224-24ebc7900628", "address": "fa:16:3e:41:0f:f2", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac345dde-46", "ovs_interfaceid": "ac345dde-4672-4c9d-a224-24ebc7900628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.663963] env[62204]: DEBUG nova.compute.manager [req-e4324e5d-0ad3-4b55-9e54-f0ebd49cb86d req-85566780-ea22-4f7e-9c06-cb644a0b18e6 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Received event network-changed-21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.664178] env[62204]: DEBUG nova.compute.manager [req-e4324e5d-0ad3-4b55-9e54-f0ebd49cb86d req-85566780-ea22-4f7e-9c06-cb644a0b18e6 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Refreshing instance network info cache due to event network-changed-21c10daf-76af-4fd9-8681-58fdf9ea566f. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 822.664462] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4324e5d-0ad3-4b55-9e54-f0ebd49cb86d req-85566780-ea22-4f7e-9c06-cb644a0b18e6 service nova] Acquiring lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 822.665283] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171f5762-00cb-4edb-8ddc-c6ec520eed46 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.679962] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1613eb98-6232-4701-8aa5-ec73d479d997 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.688075] env[62204]: INFO nova.compute.manager [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Took 36.46 seconds to build instance. [ 822.698314] env[62204]: DEBUG nova.compute.provider_tree [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.883492] env[62204]: DEBUG nova.compute.manager [req-38058812-6e8f-4d10-ad2a-df547666b837 req-2ac38b86-b0b4-40d3-b37c-ee7f6a34250f service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Received event network-vif-deleted-c81069ae-b3b9-4b0d-902f-ed9a2e24542f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.894749] env[62204]: DEBUG nova.network.neutron [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.911994] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "2727dc46-98ed-435d-89ef-41bc20cda776" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.033167] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199795, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63888} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.033728] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 6dc170a4-b08e-44b5-a152-832670e6866b/6dc170a4-b08e-44b5-a152-832670e6866b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.034058] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.034384] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae40da55-82a5-46e7-a432-7d1033d4b6d6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.045586] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 823.045586] env[62204]: value = "task-1199796" [ 823.045586] env[62204]: _type = "Task" [ 823.045586] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.060457] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199796, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.116717] env[62204]: DEBUG nova.network.neutron [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Updating instance_info_cache with network_info: [{"id": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "address": "fa:16:3e:df:a6:1a", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21c10daf-76", "ovs_interfaceid": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.118781] env[62204]: INFO nova.compute.manager [-] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Took 2.07 seconds to deallocate network for instance. 
[ 823.124988] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquiring lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.125472] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.126157] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquiring lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.126417] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.126791] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.129345] env[62204]: DEBUG oslo_concurrency.lockutils [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Releasing lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.130808] env[62204]: INFO nova.compute.manager [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Terminating instance [ 823.132939] env[62204]: DEBUG nova.compute.manager [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 823.133167] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.134096] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a4aa0b-ee41-4a66-a10e-9265677c3782 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.145749] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.146136] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16fb9d6a-a776-461c-a9f3-428c64707c45 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.155580] env[62204]: DEBUG oslo_vmware.api [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 823.155580] env[62204]: value = "task-1199797" [ 823.155580] env[62204]: _type = "Task" [ 823.155580] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.167495] env[62204]: DEBUG oslo_vmware.api [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199797, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.189682] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd14d59f-8d88-410b-ac97-cf52cc0a1aa7 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.041s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.202379] env[62204]: DEBUG nova.scheduler.client.report [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 823.558467] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199796, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085781} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.558910] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 823.559775] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b5231d-8580-451e-9a5f-a22fbbd6f1a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.596159] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 6dc170a4-b08e-44b5-a152-832670e6866b/6dc170a4-b08e-44b5-a152-832670e6866b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.596720] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c1baca9-af98-4a5c-81c3-15fb01e7028b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.621810] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Releasing lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 823.622381] env[62204]: DEBUG nova.compute.manager [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Instance network_info: |[{"id": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "address": "fa:16:3e:df:a6:1a", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21c10daf-76", "ovs_interfaceid": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 823.622848] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4324e5d-0ad3-4b55-9e54-f0ebd49cb86d req-85566780-ea22-4f7e-9c06-cb644a0b18e6 service nova] Acquired lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.623168] env[62204]: DEBUG nova.network.neutron [req-e4324e5d-0ad3-4b55-9e54-f0ebd49cb86d req-85566780-ea22-4f7e-9c06-cb644a0b18e6 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Refreshing network info cache for port 21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 823.624859] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:a6:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd098b1c-636f-492d-b5ae-037cb0cae454', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21c10daf-76af-4fd9-8681-58fdf9ea566f', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.635087] env[62204]: DEBUG oslo.service.loopingcall [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 823.639473] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.639819] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.641112] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 823.642607] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3e66c44-3ad4-4616-8b8a-b6c7ce843e40 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.657252] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7279d86d-43d4-4eba-9708-d201c552bf1f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.659511] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 823.659511] env[62204]: value = "task-1199798" [ 823.659511] env[62204]: _type = "Task" [ 823.659511] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.670251] env[62204]: DEBUG oslo_vmware.api [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 823.670251] env[62204]: value = "task-1199799" [ 823.670251] env[62204]: _type = "Task" [ 823.670251] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.672691] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.672691] env[62204]: value = "task-1199800" [ 823.672691] env[62204]: _type = "Task" [ 823.672691] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.686278] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199798, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.686614] env[62204]: DEBUG oslo_vmware.api [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199797, 'name': PowerOffVM_Task, 'duration_secs': 0.35903} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.687390] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.687569] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 823.688177] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-947d0f33-0fb1-4a37-992c-195ffcd42209 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.696028] env[62204]: DEBUG nova.compute.manager [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 823.699714] env[62204]: DEBUG oslo_vmware.api [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199799, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.704665] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199800, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.707722] env[62204]: DEBUG oslo_concurrency.lockutils [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.077s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.710887] env[62204]: DEBUG oslo_concurrency.lockutils [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.819s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 823.711298] env[62204]: DEBUG nova.objects.instance [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lazy-loading 'resources' on Instance uuid 69604167-6a61-4723-bf7d-7ba168837839 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 823.745474] env[62204]: INFO nova.scheduler.client.report [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Deleted allocations for instance cce823b9-6a03-4902-9794-2b93f99eef94 [ 823.791533] env[62204]: DEBUG oslo_vmware.rw_handles [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281d8f4-9b48-b76e-5d3e-cd8a5ea2d8c8/disk-0.vmdk. 
{{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 823.792989] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8563a3bd-edc8-4378-98bb-5a967cdff0bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.797677] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 823.797948] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 823.798281] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Deleting the datastore file [datastore1] 60eaec9c-5dcc-4e2f-9649-78acba318a6b {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 823.799078] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25798806-b809-4658-93fa-5831f2ffe4c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.804104] env[62204]: DEBUG oslo_vmware.rw_handles [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281d8f4-9b48-b76e-5d3e-cd8a5ea2d8c8/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 823.804401] env[62204]: ERROR oslo_vmware.rw_handles [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281d8f4-9b48-b76e-5d3e-cd8a5ea2d8c8/disk-0.vmdk due to incomplete transfer. [ 823.804732] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5eb82619-922e-4409-a3e5-9e3b8c37bc89 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.809027] env[62204]: DEBUG oslo_vmware.api [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for the task: (returnval){ [ 823.809027] env[62204]: value = "task-1199802" [ 823.809027] env[62204]: _type = "Task" [ 823.809027] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.816927] env[62204]: DEBUG oslo_vmware.rw_handles [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281d8f4-9b48-b76e-5d3e-cd8a5ea2d8c8/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 823.817286] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Uploaded image 4801d6e4-f18a-4d32-b037-0f23254b78b2 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 823.820130] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 823.824581] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e3829c22-5ceb-4456-a6a6-a4658ce29352 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.826524] env[62204]: DEBUG oslo_vmware.api [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.834019] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 823.834019] env[62204]: value = "task-1199803" [ 823.834019] env[62204]: _type = "Task" [ 823.834019] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.845740] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199803, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.178712] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199798, 'name': ReconfigVM_Task, 'duration_secs': 0.477536} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.182232] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 6dc170a4-b08e-44b5-a152-832670e6866b/6dc170a4-b08e-44b5-a152-832670e6866b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 824.185884] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ce7f92a-d080-49be-9257-41c46dd01984 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.197433] env[62204]: DEBUG oslo_vmware.api [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199799, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.202013] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 824.202013] env[62204]: value = "task-1199804" [ 824.202013] env[62204]: _type = "Task" [ 824.202013] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.204466] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199800, 'name': CreateVM_Task, 'duration_secs': 0.441094} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.204908] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 824.211371] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.211560] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.211884] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 824.212530] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17e40dfc-9e93-4c40-9d61-568055e9997a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.224559] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 824.224559] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb9bf8-c7b8-52ea-be56-ae762bac1a53" [ 824.224559] env[62204]: _type = "Task" [ 824.224559] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.230068] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199804, 'name': Rename_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.241754] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb9bf8-c7b8-52ea-be56-ae762bac1a53, 'name': SearchDatastore_Task, 'duration_secs': 0.01614} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.243011] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.243370] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 824.243636] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.243876] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 824.244058] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.244252] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.244556] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b05819d5-64e4-4551-9fbf-d6146be5ed47 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.260241] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.260455] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 824.264028] env[62204]: DEBUG oslo_concurrency.lockutils [None req-01509223-53b6-4205-927d-ea63769ea506 tempest-VolumesAdminNegativeTest-935972255 tempest-VolumesAdminNegativeTest-935972255-project-member] Lock "cce823b9-6a03-4902-9794-2b93f99eef94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.952s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.264028] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f6f774d-ba4e-4948-88bb-bcc7aad69dd4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.269698] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 824.269698] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5228ee31-5a70-cf92-9acb-bcdc726fcc61" [ 824.269698] env[62204]: _type = "Task" [ 824.269698] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.283022] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5228ee31-5a70-cf92-9acb-bcdc726fcc61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.320393] env[62204]: DEBUG oslo_vmware.api [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Task: {'id': task-1199802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306833} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.320677] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.320871] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 824.321068] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.321253] env[62204]: INFO nova.compute.manager [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Took 1.19 seconds to destroy the instance on the hypervisor. [ 824.321501] env[62204]: DEBUG oslo.service.loopingcall [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 824.322084] env[62204]: DEBUG nova.compute.manager [-] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 824.322232] env[62204]: DEBUG nova.network.neutron [-] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 824.348434] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199803, 'name': Destroy_Task} progress is 33%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.459455] env[62204]: DEBUG nova.network.neutron [req-e4324e5d-0ad3-4b55-9e54-f0ebd49cb86d req-85566780-ea22-4f7e-9c06-cb644a0b18e6 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Updated VIF entry in instance network info cache for port 21c10daf-76af-4fd9-8681-58fdf9ea566f. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 824.459771] env[62204]: DEBUG nova.network.neutron [req-e4324e5d-0ad3-4b55-9e54-f0ebd49cb86d req-85566780-ea22-4f7e-9c06-cb644a0b18e6 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Updating instance_info_cache with network_info: [{"id": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "address": "fa:16:3e:df:a6:1a", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21c10daf-76", "ovs_interfaceid": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.654672] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af5b3c5-c8c8-49ef-a20c-975bdbc8ac95 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.665138] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862a6a89-1de8-40b8-93db-f26dea02bf7e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.706916] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e815cc-f918-4adc-b227-b3d496a28f2f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.723867] env[62204]: DEBUG oslo_vmware.api [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199799, 'name': PowerOnVM_Task, 'duration_secs': 0.572824} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.727919] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f76a92-b301-40fe-be2b-b14ac0a303d7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.731920] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.732153] env[62204]: DEBUG nova.compute.manager [None req-82d0fc60-a573-4514-8aea-f104a01ecd81 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 824.732468] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199804, 'name': Rename_Task, 'duration_secs': 0.205171} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.733199] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-532a6bfb-5ea2-40ea-b06f-26dc4e27d74f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.736594] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.737347] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e74a358-d8d1-4a49-ac0f-47cce06d3504 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.750524] env[62204]: DEBUG nova.compute.provider_tree [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.754952] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 824.754952] env[62204]: value = "task-1199805" [ 824.754952] env[62204]: _type = "Task" [ 824.754952] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.769177] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199805, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.783097] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5228ee31-5a70-cf92-9acb-bcdc726fcc61, 'name': SearchDatastore_Task, 'duration_secs': 0.019008} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.784139] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bff86447-22ae-4c85-9ae1-8b0fd9a1f122 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.791268] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 824.791268] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521baeae-aa0c-465e-1bfe-0343bfd370b4" [ 824.791268] env[62204]: _type = "Task" [ 824.791268] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.801442] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521baeae-aa0c-465e-1bfe-0343bfd370b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.846411] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199803, 'name': Destroy_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.962138] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4324e5d-0ad3-4b55-9e54-f0ebd49cb86d req-85566780-ea22-4f7e-9c06-cb644a0b18e6 service nova] Releasing lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.048574] env[62204]: DEBUG nova.compute.manager [req-e24b16fc-0823-48cd-b9de-d4c7e3615771 req-1e2c0d92-649b-4cab-93dc-cbb34901abd9 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Received event network-vif-deleted-e8eedf25-892e-4ebf-94d9-22b836abd37c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 825.048630] env[62204]: INFO nova.compute.manager [req-e24b16fc-0823-48cd-b9de-d4c7e3615771 req-1e2c0d92-649b-4cab-93dc-cbb34901abd9 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Neutron deleted interface e8eedf25-892e-4ebf-94d9-22b836abd37c; detaching it from the instance and deleting it from the info cache [ 825.048849] env[62204]: DEBUG nova.network.neutron [req-e24b16fc-0823-48cd-b9de-d4c7e3615771 req-1e2c0d92-649b-4cab-93dc-cbb34901abd9 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.145936] env[62204]: DEBUG nova.network.neutron [-] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.259308] env[62204]: DEBUG nova.scheduler.client.report [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 825.282482] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199805, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.302725] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521baeae-aa0c-465e-1bfe-0343bfd370b4, 'name': SearchDatastore_Task, 'duration_secs': 0.018828} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.303152] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 825.303413] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 25563dec-7e4d-42d9-b922-0b2354b5d70e/25563dec-7e4d-42d9-b922-0b2354b5d70e.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.304077] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60091789-f1b2-4e08-8188-c9d227295b4e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.313295] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 825.313295] env[62204]: value = "task-1199806" [ 825.313295] env[62204]: _type = "Task" [ 825.313295] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.323119] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199806, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.346317] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199803, 'name': Destroy_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.500666] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "2c393123-87de-460a-965d-43473478a79f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.501084] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2c393123-87de-460a-965d-43473478a79f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.501353] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "2c393123-87de-460a-965d-43473478a79f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.501549] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2c393123-87de-460a-965d-43473478a79f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.501722] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2c393123-87de-460a-965d-43473478a79f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.507148] env[62204]: INFO nova.compute.manager [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Terminating instance [ 825.507148] env[62204]: DEBUG nova.compute.manager [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 825.507148] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 825.507570] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec620f6a-1844-42f7-ab8e-ebafbfec069f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.516957] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 825.517266] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84fb11b5-0be7-4fd3-963a-5de02ac3330d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.526618] env[62204]: DEBUG oslo_vmware.api [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 825.526618] env[62204]: value = "task-1199807" [ 825.526618] env[62204]: _type = "Task" [ 825.526618] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.536318] env[62204]: DEBUG oslo_vmware.api [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199807, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.551915] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f5026b0-7ad8-4119-910d-a5f070445c8d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.563987] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ed8026-c934-4066-bf49-b6a5a57a06c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.602496] env[62204]: DEBUG nova.compute.manager [req-e24b16fc-0823-48cd-b9de-d4c7e3615771 req-1e2c0d92-649b-4cab-93dc-cbb34901abd9 service nova] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Detach interface failed, port_id=e8eedf25-892e-4ebf-94d9-22b836abd37c, reason: Instance 60eaec9c-5dcc-4e2f-9649-78acba318a6b could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 825.649477] env[62204]: INFO nova.compute.manager [-] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Took 1.33 seconds to deallocate network for instance. 
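
Editor's note: the lock entries just above ("Acquiring lock "2c393123-…" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance"", then the matching "-events" lock) come from oslo_concurrency's lockutils, which serializes operations on a single instance by locking on its UUID. A minimal sketch of that pattern, using only the oslo.concurrency API; the function bodies are hypothetical and are not Nova's code:

    from oslo_concurrency import lockutils

    def terminate_instance(instance_uuid):
        # Use the instance UUID as the lock name, mirroring the
        # "Lock '<uuid>' acquired by ... do_terminate_instance" entries above.
        @lockutils.synchronized(instance_uuid)
        def do_terminate_instance():
            # power off / unregister / datastore cleanup would happen here
            pass

        do_terminate_instance()

The separate "<uuid>-events" lock seen in the same entries guards the instance's external-event bookkeeping (clear_events_for_instance) in the same way, just under a different lock name.
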
[ 825.740276] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.740702] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.740745] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 825.740979] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.741175] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.743480] env[62204]: INFO nova.compute.manager [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Terminating instance [ 825.746198] env[62204]: DEBUG nova.compute.manager [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 825.746198] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 825.746931] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933de162-278b-4d63-8b0a-a1c77347f032 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.756339] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 825.757058] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2dc11376-881e-488c-b0ab-bda5c966b6f2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.768050] env[62204]: DEBUG oslo_concurrency.lockutils [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.771764] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.911s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.775718] env[62204]: INFO nova.compute.claims [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 825.785842] env[62204]: DEBUG oslo_vmware.api [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199805, 'name': PowerOnVM_Task, 'duration_secs': 0.722909} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.788050] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.788322] env[62204]: INFO nova.compute.manager [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Took 9.78 seconds to spawn the instance on the hypervisor. 
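
Editor's note: the "Invoking VirtualMachine.PowerOnVM_Task" entry, the "Task: {'id': task-1199805, 'name': PowerOnVM_Task} progress is …" polling lines and the final "completed successfully" entry above are one oslo.vmware request/poll cycle: invoke_api() issues the SOAP call and returns a task reference, and wait_for_task() polls it (the _poll_task lines) until it succeeds or raises. A minimal sketch of that calling pattern, assuming an already-created VMwareAPISession and a VM managed-object reference obtained elsewhere; the commented-out connection arguments are placeholders, not values from this deployment:

    from oslo_vmware import api

    def power_on_vm(session, vm_ref):
        # Issue the SOAP call; invoke_api() returns a Task reference that
        # vCenter works on asynchronously.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Block while oslo.vmware polls the task (the "_poll_task ... progress
        # is N%" lines); a task error is raised as a translated exception.
        session.wait_for_task(task)

    # Hypothetical session setup -- host and credentials are placeholders:
    # session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)
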
[ 825.788508] env[62204]: DEBUG nova.compute.manager [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 825.795300] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb3d637-7d62-4e7e-9800-ec57389840c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.827897] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199806, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.852506] env[62204]: DEBUG oslo_vmware.api [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199803, 'name': Destroy_Task, 'duration_secs': 1.603723} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.852903] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Destroyed the VM [ 825.853341] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 825.854569] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f54be477-f4b4-460f-8429-d07819c53d84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.863034] env[62204]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
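
Editor's note: the suds.client WARNING above, together with the "Fault list: [ManagedObjectNotFound]" entry that follows, shows vCenter reporting a SOAP fault on one of the in-flight calls, most likely the RemoveSnapshot_Task invoked just before it; oslo.vmware translates such faults into Python exceptions. A sketch, not Nova's actual cleanup code, of how a caller can treat that fault as "already gone" (the exception class name is taken from oslo_vmware.exceptions and should be read as an assumption about this environment):

    from oslo_vmware import exceptions as vexc

    def delete_vm_snapshot(session, snapshot_ref):
        try:
            task = session.invoke_api(session.vim, 'RemoveSnapshot_Task',
                                      snapshot_ref, removeChildren=False)
            session.wait_for_task(task)
        except vexc.ManagedObjectNotFoundException:
            # The snapshot (or its VM) has already been removed on the vCenter
            # side -- matching the ManagedObjectNotFound fault above -- so
            # there is nothing left to clean up.
            pass
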
[ 825.863034] env[62204]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62204) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 825.863743] env[62204]: DEBUG nova.compute.utils [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Cleaning up image 4801d6e4-f18a-4d32-b037-0f23254b78b2 {{(pid=62204) delete_image /opt/stack/nova/nova/compute/utils.py:1322}} [ 825.868397] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 825.869142] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 825.869422] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleting the datastore file [datastore2] dba1edda-edfd-4a97-ab95-48f3f5a933f8 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 825.869976] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d012ea83-73b6-44cc-be41-b978fba3e8c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.878598] env[62204]: DEBUG oslo_vmware.api [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 825.878598] env[62204]: value = "task-1199809" [ 825.878598] env[62204]: _type = "Task" [ 825.878598] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.895079] env[62204]: DEBUG oslo_vmware.api [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199809, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.038859] env[62204]: DEBUG oslo_vmware.api [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199807, 'name': PowerOffVM_Task, 'duration_secs': 0.268355} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.039344] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 826.039548] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 826.039830] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ad9388c-ae05-4c95-b0c3-b6a65e19ef18 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.136156] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 826.136420] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 826.136635] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleting the datastore file [datastore2] 2c393123-87de-460a-965d-43473478a79f {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 826.136958] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15a9fcf0-1114-4373-9603-62b688ad676e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.144307] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "7b7032a8-8093-43fb-b2e2-c6308d96e819" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.144545] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.149185] env[62204]: DEBUG oslo_vmware.api [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 
tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 826.149185] env[62204]: value = "task-1199811" [ 826.149185] env[62204]: _type = "Task" [ 826.149185] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.160153] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.160397] env[62204]: DEBUG oslo_vmware.api [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199811, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.302096] env[62204]: DEBUG oslo_concurrency.lockutils [None req-81f044f0-5b17-4201-ba35-92cf5be89e8b tempest-ServerActionsV293TestJSON-1601953463 tempest-ServerActionsV293TestJSON-1601953463-project-member] Lock "69604167-6a61-4723-bf7d-7ba168837839" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.078s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.323175] env[62204]: INFO nova.compute.manager [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Took 37.40 seconds to build instance. [ 826.335152] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199806, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576705} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.335750] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 25563dec-7e4d-42d9-b922-0b2354b5d70e/25563dec-7e4d-42d9-b922-0b2354b5d70e.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 826.336746] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 826.337041] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98ab24b9-cc1e-4973-9c41-f0454ee0d5a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.347962] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 826.347962] env[62204]: value = "task-1199812" [ 826.347962] env[62204]: _type = "Task" [ 826.347962] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.364357] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199812, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.400344] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] In vmwareapi:vmops:_destroy_instance, exception while deleting the VM contents from the disk: oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore2] dba1edda-edfd-4a97-ab95-48f3f5a933f8 [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Traceback (most recent call last): [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1113, in _destroy_instance [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] ds_util.file_delete(self._session, [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] File "/opt/stack/nova/nova/virt/vmwareapi/ds_util.py", line 219, in file_delete [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] session._wait_for_task(file_delete_task) [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] return self.wait_for_task(task_ref) [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] return evt.wait() [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] result = hub.switch() [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] return self.greenlet.switch() [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] self.f(*self.args, **self.kw) [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] raise exceptions.translate_fault(task_info.error) [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] 
oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore2] dba1edda-edfd-4a97-ab95-48f3f5a933f8 [ 826.400344] env[62204]: ERROR nova.virt.vmwareapi.vmops [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] [ 826.403358] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 826.403567] env[62204]: INFO nova.compute.manager [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Took 0.66 seconds to destroy the instance on the hypervisor. [ 826.403820] env[62204]: DEBUG oslo.service.loopingcall [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 826.404052] env[62204]: DEBUG nova.compute.manager [-] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 826.404658] env[62204]: DEBUG nova.network.neutron [-] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 826.664582] env[62204]: DEBUG oslo_vmware.api [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.233473} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.666097] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 826.666404] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 826.666935] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 826.666935] env[62204]: INFO nova.compute.manager [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Took 1.16 seconds to destroy the instance on the hypervisor. 
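
Editor's note: the traceback above ends in oslo_vmware.exceptions.CannotDeleteFileException, and the surrounding entries show the failure being logged as a warning while the instance is still reported destroyed and handed on to network deallocation. A sketch of that tolerant delete, assuming only the oslo.vmware session API; the helper name and arguments are illustrative, not the ds_util code quoted in the traceback:

    from oslo_vmware import exceptions as vexc

    def delete_datastore_path(session, file_manager_ref, ds_path, dc_ref):
        try:
            # FileManager.DeleteDatastoreFile_Task removes a file or folder,
            # e.g. "[datastore2] dba1edda-edfd-4a97-ab95-48f3f5a933f8".
            task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                      file_manager_ref, name=ds_path,
                                      datacenter=dc_ref)
            session.wait_for_task(task)
        except vexc.CannotDeleteFileException:
            # Mirrors the warning above: the error is recorded and swallowed
            # so the destroy path can continue.
            pass
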
[ 826.666935] env[62204]: DEBUG oslo.service.loopingcall [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 826.670559] env[62204]: DEBUG nova.compute.manager [-] [instance: 2c393123-87de-460a-965d-43473478a79f] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 826.670676] env[62204]: DEBUG nova.network.neutron [-] [instance: 2c393123-87de-460a-965d-43473478a79f] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 826.672448] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.672679] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.825556] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0734ffa7-5f82-4a50-8a9d-d4adb8e0c151 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "6dc170a4-b08e-44b5-a152-832670e6866b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.007s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.863271] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199812, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084056} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.863977] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.865057] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86f0fc7-6b96-4da4-878e-a0b7800eec46 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.903019] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 25563dec-7e4d-42d9-b922-0b2354b5d70e/25563dec-7e4d-42d9-b922-0b2354b5d70e.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.907488] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-538e4074-437a-4404-9442-fc51f7e675d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.935178] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 826.935178] env[62204]: value = "task-1199813" [ 826.935178] env[62204]: _type = "Task" [ 826.935178] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.950673] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199813, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.085091] env[62204]: DEBUG nova.compute.manager [req-3debe670-e5f5-451c-8bfb-448372509e1c req-1c179c00-8b43-4589-ad54-dc70024679b4 service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Received event network-vif-deleted-8d18fd6c-f785-481a-bfed-eea83e68e234 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.086504] env[62204]: INFO nova.compute.manager [req-3debe670-e5f5-451c-8bfb-448372509e1c req-1c179c00-8b43-4589-ad54-dc70024679b4 service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Neutron deleted interface 8d18fd6c-f785-481a-bfed-eea83e68e234; detaching it from the instance and deleting it from the info cache [ 827.086793] env[62204]: DEBUG nova.network.neutron [req-3debe670-e5f5-451c-8bfb-448372509e1c req-1c179c00-8b43-4589-ad54-dc70024679b4 service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.307865] env[62204]: DEBUG nova.compute.manager [req-b82e4bde-1473-4864-b10c-0391fda9b989 req-ba5bb11a-c1d6-44aa-9fb7-bafc2f729447 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Received event network-vif-deleted-8a6c2acb-ed90-490d-b4c8-9ab8037a80d2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.307865] env[62204]: INFO nova.compute.manager [req-b82e4bde-1473-4864-b10c-0391fda9b989 req-ba5bb11a-c1d6-44aa-9fb7-bafc2f729447 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Neutron deleted interface 8a6c2acb-ed90-490d-b4c8-9ab8037a80d2; detaching it from the instance and deleting it from the info cache [ 827.307865] env[62204]: DEBUG nova.network.neutron [req-b82e4bde-1473-4864-b10c-0391fda9b989 req-ba5bb11a-c1d6-44aa-9fb7-bafc2f729447 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.335516] env[62204]: DEBUG nova.compute.manager [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 827.354178] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab1e10a-3015-4a8e-8bbb-cc33e3dd7d86 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.364591] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d08f6e-0250-408a-a0b8-21129baa7a45 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.402692] env[62204]: DEBUG nova.network.neutron [-] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.407051] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdad96a-fef7-48eb-9300-21c43526befb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.417757] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19790e84-82ab-421a-9f9e-97b0447ea46a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.427464] env[62204]: DEBUG oslo_concurrency.lockutils [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.435676] env[62204]: DEBUG nova.compute.provider_tree [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.449023] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199813, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.595809] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b35fe26a-634f-4c2d-80e7-417cccaa31de {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.607071] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a260d901-a0e5-4863-a83b-4dfd8f6de599 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.648621] env[62204]: DEBUG nova.network.neutron [-] [instance: 2c393123-87de-460a-965d-43473478a79f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.650111] env[62204]: DEBUG nova.compute.manager [req-3debe670-e5f5-451c-8bfb-448372509e1c req-1c179c00-8b43-4589-ad54-dc70024679b4 service nova] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Detach interface failed, port_id=8d18fd6c-f785-481a-bfed-eea83e68e234, reason: Instance dba1edda-edfd-4a97-ab95-48f3f5a933f8 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 827.676398] env[62204]: INFO nova.compute.manager [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Rescuing [ 827.676748] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "refresh_cache-6dc170a4-b08e-44b5-a152-832670e6866b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.676968] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "refresh_cache-6dc170a4-b08e-44b5-a152-832670e6866b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.677166] env[62204]: DEBUG nova.network.neutron [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 827.808844] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b07a75b-0164-427b-a8c3-471f42593eb1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.819778] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6526dcd-990a-4199-821f-f7723b9a468b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.855155] env[62204]: DEBUG nova.compute.manager [req-b82e4bde-1473-4864-b10c-0391fda9b989 req-ba5bb11a-c1d6-44aa-9fb7-bafc2f729447 service nova] [instance: 2c393123-87de-460a-965d-43473478a79f] Detach interface failed, port_id=8a6c2acb-ed90-490d-b4c8-9ab8037a80d2, 
reason: Instance 2c393123-87de-460a-965d-43473478a79f could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 827.872785] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.905643] env[62204]: INFO nova.compute.manager [-] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Took 1.50 seconds to deallocate network for instance. [ 827.942872] env[62204]: DEBUG nova.scheduler.client.report [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 827.955357] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199813, 'name': ReconfigVM_Task, 'duration_secs': 0.559857} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.955714] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 25563dec-7e4d-42d9-b922-0b2354b5d70e/25563dec-7e4d-42d9-b922-0b2354b5d70e.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.956409] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74bc1bde-264f-4562-aa2a-4f3104c9f286 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.968512] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 827.968512] env[62204]: value = "task-1199814" [ 827.968512] env[62204]: _type = "Task" [ 827.968512] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.982840] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199814, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.152160] env[62204]: INFO nova.compute.manager [-] [instance: 2c393123-87de-460a-965d-43473478a79f] Took 1.48 seconds to deallocate network for instance. [ 828.414977] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.454206] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.679s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.454206] env[62204]: DEBUG nova.compute.manager [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 828.457650] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.186s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 828.466138] env[62204]: INFO nova.compute.claims [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.488335] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199814, 'name': Rename_Task, 'duration_secs': 0.213224} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.488335] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 828.488335] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-355c99b8-05a6-4ef8-aedc-a3a175083757 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.502771] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 828.502771] env[62204]: value = "task-1199815" [ 828.502771] env[62204]: _type = "Task" [ 828.502771] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.519286] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199815, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.661220] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.717978] env[62204]: DEBUG nova.network.neutron [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Updating instance_info_cache with network_info: [{"id": "06dfadf2-c796-4fd2-a53f-55cb955837a9", "address": "fa:16:3e:1d:bf:ce", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap06dfadf2-c7", "ovs_interfaceid": "06dfadf2-c796-4fd2-a53f-55cb955837a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
828.970557] env[62204]: DEBUG nova.compute.utils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 828.972213] env[62204]: DEBUG nova.compute.manager [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 828.975813] env[62204]: DEBUG nova.network.neutron [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 829.021743] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199815, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.075766] env[62204]: DEBUG nova.policy [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c29444fe298d48578ce250063841a3c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec87b51786754b05aa75abb818bdbc15', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 829.220817] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "refresh_cache-6dc170a4-b08e-44b5-a152-832670e6866b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.478101] env[62204]: DEBUG nova.compute.manager [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 829.519064] env[62204]: DEBUG oslo_vmware.api [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199815, 'name': PowerOnVM_Task, 'duration_secs': 0.733657} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.519392] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 829.520427] env[62204]: INFO nova.compute.manager [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Took 8.74 seconds to spawn the instance on the hypervisor. [ 829.520427] env[62204]: DEBUG nova.compute.manager [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 829.520688] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2adc5197-5f00-4245-a49d-d0b393b91c55 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.753155] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 829.753868] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13195104-c6f6-4d81-8168-7c3dfd78c6ae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.765462] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 829.765462] env[62204]: value = "task-1199816" [ 829.765462] env[62204]: _type = "Task" [ 829.765462] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.785112] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199816, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.969026] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc396f7-2bab-41ed-84cf-e624d973c544 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.978716] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ffe042-6572-4ea7-beb5-94af1d97b610 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.034219] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a593bf0-480b-4d1f-9483-78f7a654b7a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.049274] env[62204]: INFO nova.compute.manager [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Took 36.96 seconds to build instance. [ 830.052068] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c8f7c9-b142-48a0-b0ec-f1bb694ca6d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.077484] env[62204]: DEBUG nova.compute.provider_tree [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.132753] env[62204]: DEBUG nova.network.neutron [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Successfully created port: 07d4d455-7847-4302-9485-d456629057e7 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.277218] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199816, 'name': PowerOffVM_Task, 'duration_secs': 0.460693} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.277545] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.278438] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8324af69-96de-4a92-946a-40d14af277bc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.300066] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a375d3e9-9d9b-467c-aa93-f35bfa1a0be4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.344764] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.345533] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d45e721-899e-4966-8ad7-60bbc665bf3c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.355883] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 830.355883] env[62204]: value = "task-1199817" [ 830.355883] env[62204]: _type = "Task" [ 830.355883] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.365893] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199817, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.537195] env[62204]: DEBUG nova.compute.manager [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 830.563327] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00d493b3-ec5f-422b-ba8b-430619f03550 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.960s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.576640] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 830.577127] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 830.577171] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.579579] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 830.579801] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.579974] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 830.580233] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 830.580399] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 830.580568] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 830.580735] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 830.580913] env[62204]: DEBUG nova.virt.hardware [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 830.581990] env[62204]: DEBUG nova.scheduler.client.report [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 830.589840] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168686e1-12dc-4e81-b3d8-3ca2977684a9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.603465] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc776467-ddb5-4c32-b571-f3a9b2018368 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.870648] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 830.870896] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 830.871208] env[62204]: DEBUG oslo_concurrency.lockutils [None 
req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.871412] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.871614] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 830.871878] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ea6dff6-3718-46d5-be28-a037c9d5096f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.885179] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 830.885394] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 830.886254] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06610192-0ac8-4b5d-bde9-b7a53c95ac40 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.893134] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 830.893134] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b08551-c287-baf1-4685-d056f19b76f6" [ 830.893134] env[62204]: _type = "Task" [ 830.893134] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.902480] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b08551-c287-baf1-4685-d056f19b76f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.982394] env[62204]: DEBUG oslo_vmware.rw_handles [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5273c912-b0b0-d145-1549-d4b260be6fe9/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 830.984480] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4dba0d-4322-4db4-867d-7e29b3457d26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.993151] env[62204]: DEBUG oslo_vmware.rw_handles [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5273c912-b0b0-d145-1549-d4b260be6fe9/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 830.993270] env[62204]: ERROR oslo_vmware.rw_handles [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5273c912-b0b0-d145-1549-d4b260be6fe9/disk-0.vmdk due to incomplete transfer. [ 830.994706] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-887e8645-6798-46ae-a1fb-aa0b2b727e67 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.998129] env[62204]: DEBUG nova.compute.manager [req-7b876e50-988a-4a99-9ba1-e0b38539f1e1 req-da2338ab-9bf2-45ef-bfaf-9d98a1ac0bf9 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Received event network-changed-21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 830.998129] env[62204]: DEBUG nova.compute.manager [req-7b876e50-988a-4a99-9ba1-e0b38539f1e1 req-da2338ab-9bf2-45ef-bfaf-9d98a1ac0bf9 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Refreshing instance network info cache due to event network-changed-21c10daf-76af-4fd9-8681-58fdf9ea566f. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 830.998658] env[62204]: DEBUG oslo_concurrency.lockutils [req-7b876e50-988a-4a99-9ba1-e0b38539f1e1 req-da2338ab-9bf2-45ef-bfaf-9d98a1ac0bf9 service nova] Acquiring lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.998658] env[62204]: DEBUG oslo_concurrency.lockutils [req-7b876e50-988a-4a99-9ba1-e0b38539f1e1 req-da2338ab-9bf2-45ef-bfaf-9d98a1ac0bf9 service nova] Acquired lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.998658] env[62204]: DEBUG nova.network.neutron [req-7b876e50-988a-4a99-9ba1-e0b38539f1e1 req-da2338ab-9bf2-45ef-bfaf-9d98a1ac0bf9 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Refreshing network info cache for port 21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 831.010568] env[62204]: DEBUG oslo_vmware.rw_handles [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5273c912-b0b0-d145-1549-d4b260be6fe9/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 831.010791] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Uploaded image aae8f3fb-ae2c-4ce4-a446-fb8637ad83c9 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 831.013153] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 831.014131] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-659e3e6d-47d1-42db-80b8-acb6ac1a4c83 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.023379] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 831.023379] env[62204]: value = "task-1199818" [ 831.023379] env[62204]: _type = "Task" [ 831.023379] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.033922] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199818, 'name': Destroy_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.073382] env[62204]: DEBUG nova.compute.manager [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 831.094893] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.637s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.096034] env[62204]: DEBUG nova.compute.manager [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 831.098259] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.499s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.100296] env[62204]: INFO nova.compute.claims [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.403732] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b08551-c287-baf1-4685-d056f19b76f6, 'name': SearchDatastore_Task, 'duration_secs': 0.010162} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.404568] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85ad2da5-7bf9-4867-9641-5f2b1e17f0b5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.411297] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 831.411297] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521c1ad0-a1c3-b2b3-88d5-ba38d31de88b" [ 831.411297] env[62204]: _type = "Task" [ 831.411297] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.420900] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521c1ad0-a1c3-b2b3-88d5-ba38d31de88b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.534017] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199818, 'name': Destroy_Task, 'duration_secs': 0.412396} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.536484] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Destroyed the VM [ 831.536720] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 831.537056] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-11189911-6123-4a4f-9e33-9b6c731915a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.546117] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 831.546117] env[62204]: value = "task-1199819" [ 831.546117] env[62204]: _type = "Task" [ 831.546117] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.554869] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199819, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.595994] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.604915] env[62204]: DEBUG nova.compute.utils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 831.606931] env[62204]: DEBUG nova.compute.manager [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 831.607131] env[62204]: DEBUG nova.network.neutron [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 831.690422] env[62204]: DEBUG nova.policy [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a29d584772d84abe8d36db8dffcc0729', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc9c47a4209c4f158e39dd04afd17fa5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 831.696517] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "25563dec-7e4d-42d9-b922-0b2354b5d70e" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.696775] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.697011] env[62204]: INFO nova.compute.manager [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Rebooting instance [ 831.718980] env[62204]: DEBUG nova.network.neutron [req-7b876e50-988a-4a99-9ba1-e0b38539f1e1 
req-da2338ab-9bf2-45ef-bfaf-9d98a1ac0bf9 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Updated VIF entry in instance network info cache for port 21c10daf-76af-4fd9-8681-58fdf9ea566f. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 831.719599] env[62204]: DEBUG nova.network.neutron [req-7b876e50-988a-4a99-9ba1-e0b38539f1e1 req-da2338ab-9bf2-45ef-bfaf-9d98a1ac0bf9 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Updating instance_info_cache with network_info: [{"id": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "address": "fa:16:3e:df:a6:1a", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21c10daf-76", "ovs_interfaceid": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.927935] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521c1ad0-a1c3-b2b3-88d5-ba38d31de88b, 'name': SearchDatastore_Task, 'duration_secs': 0.010544} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.928348] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.928748] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 6dc170a4-b08e-44b5-a152-832670e6866b/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. 
{{(pid=62204) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 831.929043] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0c11821-2800-4c25-af27-496f72e5fd14 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.941079] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 831.941079] env[62204]: value = "task-1199820" [ 831.941079] env[62204]: _type = "Task" [ 831.941079] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.951898] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199820, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.061289] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199819, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.112577] env[62204]: DEBUG nova.compute.manager [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 832.226937] env[62204]: DEBUG oslo_concurrency.lockutils [req-7b876e50-988a-4a99-9ba1-e0b38539f1e1 req-da2338ab-9bf2-45ef-bfaf-9d98a1ac0bf9 service nova] Releasing lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.235992] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.235992] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquired lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.235992] env[62204]: DEBUG nova.network.neutron [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 832.440053] env[62204]: DEBUG nova.network.neutron [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Successfully updated port: 07d4d455-7847-4302-9485-d456629057e7 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 832.451806] env[62204]: DEBUG nova.network.neutron [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Successfully created port: d5dbefd9-695f-4a85-859d-9c9d8cb6632b {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.462096] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199820, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509119} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.464950] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 6dc170a4-b08e-44b5-a152-832670e6866b/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. 
[ 832.467246] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e942e442-2c93-4689-a9ea-1db9d5f0f4ad {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.501291] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Reconfiguring VM instance instance-00000045 to attach disk [datastore2] 6dc170a4-b08e-44b5-a152-832670e6866b/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.503435] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ab88d69-182e-4c3d-b11b-c4b6a4dea027 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.529306] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 832.529306] env[62204]: value = "task-1199821" [ 832.529306] env[62204]: _type = "Task" [ 832.529306] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.539157] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199821, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.561029] env[62204]: DEBUG oslo_vmware.api [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199819, 'name': RemoveSnapshot_Task, 'duration_secs': 0.585607} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.561587] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 832.561860] env[62204]: INFO nova.compute.manager [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Took 19.79 seconds to snapshot the instance on the hypervisor. 
[ 832.672186] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95320c2-dee0-44d0-8be4-0415ad95cff6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.681144] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b00fb9-8659-4eb8-8fe8-9d7368186a2e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.713062] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e66fb20-0d50-45f2-a5d0-4608daca98f8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.721668] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa5f9a0-6a76-4908-98d0-8e40a4125623 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.735506] env[62204]: DEBUG nova.compute.provider_tree [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.946910] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "refresh_cache-2b728904-19ef-4773-9260-c615da522801" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.946910] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "refresh_cache-2b728904-19ef-4773-9260-c615da522801" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.946910] env[62204]: DEBUG nova.network.neutron [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 833.046085] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199821, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.080636] env[62204]: DEBUG nova.compute.manager [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Received event network-vif-plugged-07d4d455-7847-4302-9485-d456629057e7 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 833.080636] env[62204]: DEBUG oslo_concurrency.lockutils [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] Acquiring lock "2b728904-19ef-4773-9260-c615da522801-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.080636] env[62204]: DEBUG oslo_concurrency.lockutils [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] Lock "2b728904-19ef-4773-9260-c615da522801-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.080636] env[62204]: DEBUG oslo_concurrency.lockutils [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] Lock "2b728904-19ef-4773-9260-c615da522801-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.080636] env[62204]: DEBUG nova.compute.manager [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] No waiting events found dispatching network-vif-plugged-07d4d455-7847-4302-9485-d456629057e7 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 833.080636] env[62204]: WARNING nova.compute.manager [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Received unexpected event network-vif-plugged-07d4d455-7847-4302-9485-d456629057e7 for instance with vm_state building and task_state spawning. [ 833.080636] env[62204]: DEBUG nova.compute.manager [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Received event network-changed-07d4d455-7847-4302-9485-d456629057e7 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 833.081222] env[62204]: DEBUG nova.compute.manager [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Refreshing instance network info cache due to event network-changed-07d4d455-7847-4302-9485-d456629057e7. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 833.081222] env[62204]: DEBUG oslo_concurrency.lockutils [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] Acquiring lock "refresh_cache-2b728904-19ef-4773-9260-c615da522801" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.124033] env[62204]: DEBUG nova.compute.manager [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Found 3 images (rotation: 2) {{(pid=62204) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 833.124725] env[62204]: DEBUG nova.compute.manager [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Rotating out 1 backups {{(pid=62204) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4562}} [ 833.124937] env[62204]: DEBUG nova.compute.manager [None req-0ab01198-7319-45e2-bb3d-f3152b114f85 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deleting image 8831d989-636e-474d-a1bb-9b95868f4ba9 {{(pid=62204) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4567}} [ 833.128071] env[62204]: DEBUG nova.compute.manager [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Start spawning the instance on the hypervisor. 
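The backup-rotation records above ("Found 3 images (rotation: 2)", "Rotating out 1 backups") reduce to simple arithmetic: keep the newest `rotation` backups and delete the excess. A hedged sketch, with `images` assumed to be ordered oldest first:

    def backups_to_rotate_out(images, rotation):
        # 3 images found with rotation=2 -> excess of 1, matching the log.
        excess = max(0, len(images) - rotation)
        return images[:excess]  # the oldest images are the ones rotated out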
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 833.130614] env[62204]: DEBUG nova.network.neutron [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Successfully created port: 0c5b107a-29a0-425e-81cd-4dae8593ec01 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.154876] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 833.155412] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 833.155412] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.155560] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 833.155667] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.155854] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 833.156330] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 833.156392] env[62204]: DEBUG 
nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 833.159375] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 833.159375] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 833.159375] env[62204]: DEBUG nova.virt.hardware [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 833.159375] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212e6e26-022c-4e32-985e-e67a058fecfb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.174927] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6443cea9-bcb8-423a-bec2-9a3d7b7232a9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.238431] env[62204]: DEBUG nova.scheduler.client.report [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 833.287075] env[62204]: DEBUG nova.network.neutron [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Updating instance_info_cache with network_info: [{"id": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "address": "fa:16:3e:df:a6:1a", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": 
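The nova.virt.hardware records above walk from the flavor/image limits down to a single candidate topology for one vCPU. The following is not Nova's code, just an illustrative enumeration showing why vcpus=1 under limits of 65536 sockets/cores/threads collapses to the one topology 1:1:1:

    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(Topology(s, c, t))
        return found

    # possible_topologies(1, 65536, 65536, 65536)
    #   -> [Topology(sockets=1, cores=1, threads=1)]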
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21c10daf-76", "ovs_interfaceid": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.537132] env[62204]: DEBUG nova.network.neutron [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.542100] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199821, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.747021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.646s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.747021] env[62204]: DEBUG nova.compute.manager [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 833.747520] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.045s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.748918] env[62204]: INFO nova.compute.claims [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.791781] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Releasing lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.794513] env[62204]: DEBUG nova.compute.manager [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 833.795395] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3663af70-19f9-4d3c-b0fd-841e3f3019f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.006634] env[62204]: DEBUG nova.network.neutron [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Updating instance_info_cache with network_info: [{"id": "07d4d455-7847-4302-9485-d456629057e7", "address": "fa:16:3e:17:5f:22", "network": {"id": "9b2eaa21-fa75-417b-8af1-754e25729d68", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1173392259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ec87b51786754b05aa75abb818bdbc15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d4d455-78", "ovs_interfaceid": "07d4d455-7847-4302-9485-d456629057e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.044030] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 
tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199821, 'name': ReconfigVM_Task, 'duration_secs': 1.223555} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.044991] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Reconfigured VM instance instance-00000045 to attach disk [datastore2] 6dc170a4-b08e-44b5-a152-832670e6866b/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.045915] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875d7fb0-2bbf-4d0c-885b-9a25a9a84ca3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.072815] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bad0f0fc-89b8-45c9-9fcd-1d7260474ff8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.090748] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 834.090748] env[62204]: value = "task-1199822" [ 834.090748] env[62204]: _type = "Task" [ 834.090748] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.099680] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199822, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.258215] env[62204]: DEBUG nova.compute.utils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 834.263024] env[62204]: DEBUG nova.compute.manager [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 834.263024] env[62204]: DEBUG nova.network.neutron [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 834.385452] env[62204]: DEBUG nova.policy [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7ef4b220ef748138fa00b468bd2652c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d5a571a94f443f5b9d26a4cde11a86c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 834.509898] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "refresh_cache-2b728904-19ef-4773-9260-c615da522801" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.509956] env[62204]: DEBUG nova.compute.manager [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Instance network_info: |[{"id": "07d4d455-7847-4302-9485-d456629057e7", "address": "fa:16:3e:17:5f:22", "network": {"id": "9b2eaa21-fa75-417b-8af1-754e25729d68", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1173392259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ec87b51786754b05aa75abb818bdbc15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d4d455-78", "ovs_interfaceid": "07d4d455-7847-4302-9485-d456629057e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 834.510292] env[62204]: DEBUG oslo_concurrency.lockutils [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] Acquired lock "refresh_cache-2b728904-19ef-4773-9260-c615da522801" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.510520] env[62204]: DEBUG nova.network.neutron [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] 
[instance: 2b728904-19ef-4773-9260-c615da522801] Refreshing network info cache for port 07d4d455-7847-4302-9485-d456629057e7 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 834.511738] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:5f:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7b5f1ef-d4b9-4ec3-b047-17e4cb349d25', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07d4d455-7847-4302-9485-d456629057e7', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.521771] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Creating folder: Project (ec87b51786754b05aa75abb818bdbc15). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 834.522569] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0ea01b31-5499-4767-8d01-9e1563b1b974 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.538172] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Created folder: Project (ec87b51786754b05aa75abb818bdbc15) in parent group-v259933. [ 834.538423] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Creating folder: Instances. Parent ref: group-v260052. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 834.538723] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-061dce2b-c389-4de3-83ba-32d0deac8591 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.560760] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Created folder: Instances in parent group-v260052. [ 834.562900] env[62204]: DEBUG oslo.service.loopingcall [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
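The two Folder.CreateFolder invocations above (a project folder, then an Instances folder underneath it) follow the same invoke_api pattern as the task calls earlier. A hedged sketch, assuming `session` is an oslo_vmware.api.VMwareAPISession and `parent_ref` is a Folder managed-object reference obtained elsewhere:

    from oslo_vmware import exceptions as vexc

    def create_folder(session, parent_ref, name):
        try:
            # CreateFolder is invoked on the parent Folder managed object and
            # returns a reference to the newly created child folder.
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_ref, name=name)
        except vexc.DuplicateName:
            # The folder already exists; callers typically look it up instead.
            return None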
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.562900] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b728904-19ef-4773-9260-c615da522801] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.562900] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48c29831-beaf-47b8-a17c-cc9a1c16d12b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.620226] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199822, 'name': ReconfigVM_Task, 'duration_secs': 0.505226} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.620789] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.620789] env[62204]: value = "task-1199825" [ 834.620789] env[62204]: _type = "Task" [ 834.620789] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.621160] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.621261] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab3e24dd-d012-40a7-8372-8b2248a75f32 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.643046] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199825, 'name': CreateVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.645159] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 834.645159] env[62204]: value = "task-1199826" [ 834.645159] env[62204]: _type = "Task" [ 834.645159] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.657137] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199826, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.766024] env[62204]: DEBUG nova.compute.manager [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 834.816756] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0627c21f-6dc2-406a-b8dd-65f1055fef1b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.831477] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Doing hard reboot of VM {{(pid=62204) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 834.831719] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-5665def4-753c-40a6-82cb-bf33f9a063fe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.842062] env[62204]: DEBUG oslo_vmware.api [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 834.842062] env[62204]: value = "task-1199827" [ 834.842062] env[62204]: _type = "Task" [ 834.842062] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.852378] env[62204]: DEBUG oslo_vmware.api [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199827, 'name': ResetVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.046479] env[62204]: DEBUG nova.network.neutron [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Successfully created port: 71f7fdd0-1ed7-463d-86be-055c23851225 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 835.137992] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199825, 'name': CreateVM_Task, 'duration_secs': 0.423397} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.138297] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b728904-19ef-4773-9260-c615da522801] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 835.139624] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.139624] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.139624] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 835.139904] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b64f9cd-9e10-4d1c-b334-9d25f4ccf65b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.156342] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 835.156342] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d3fa56-da0b-64bf-1f9d-58bf41f2f6cd" [ 835.156342] env[62204]: _type = "Task" [ 835.156342] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.165605] env[62204]: DEBUG oslo_vmware.api [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199826, 'name': PowerOnVM_Task, 'duration_secs': 0.495549} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.166617] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 835.173318] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d3fa56-da0b-64bf-1f9d-58bf41f2f6cd, 'name': SearchDatastore_Task, 'duration_secs': 0.011199} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.174415] env[62204]: DEBUG nova.compute.manager [None req-6d39a842-3a4a-465d-80d4-616d22a39f48 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 835.174765] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.175122] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 835.175227] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.175370] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.175550] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 835.176340] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591e348c-5479-4dc6-ba6c-3b494c6553b7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.183313] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57a796b2-c6e3-4c3c-b883-d8f7d3bfc0d1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.196657] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 835.196657] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 835.196657] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a60f8e4f-20cc-4b89-9ffe-e0b77c3d7dee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.202545] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 835.202545] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526da287-3cb0-7804-0fb9-11291b309815" [ 835.202545] env[62204]: _type = "Task" [ 835.202545] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.215129] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526da287-3cb0-7804-0fb9-11291b309815, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.322311] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53425ff6-7cb0-4191-89dd-2dd4e58336cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.333822] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55bb320-d3b9-4807-b185-1fc2ada68538 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.377991] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f96dcd-39e1-4f2c-a3eb-068efeab3f88 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.390461] env[62204]: DEBUG nova.network.neutron [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Updated VIF entry in instance network info cache for port 07d4d455-7847-4302-9485-d456629057e7. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 835.390832] env[62204]: DEBUG nova.network.neutron [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Updating instance_info_cache with network_info: [{"id": "07d4d455-7847-4302-9485-d456629057e7", "address": "fa:16:3e:17:5f:22", "network": {"id": "9b2eaa21-fa75-417b-8af1-754e25729d68", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1173392259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ec87b51786754b05aa75abb818bdbc15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d4d455-78", "ovs_interfaceid": "07d4d455-7847-4302-9485-d456629057e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.393499] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57088c17-2b83-4c56-8ba4-b65634949fe7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.399516] env[62204]: DEBUG oslo_vmware.api [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199827, 'name': ResetVM_Task, 'duration_secs': 0.099278} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.399516] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Did hard reboot of VM {{(pid=62204) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 835.399516] env[62204]: DEBUG nova.compute.manager [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 835.399812] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4740877-55fe-4d30-b631-6b217a0a3026 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.412981] env[62204]: DEBUG nova.compute.provider_tree [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 835.715347] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526da287-3cb0-7804-0fb9-11291b309815, 'name': SearchDatastore_Task, 'duration_secs': 0.010063} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.716650] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-763b7a67-b61e-434c-a574-20c1f23c9bce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.724559] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 835.724559] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f02052-2969-39b7-3b54-13338f2310dc" [ 835.724559] env[62204]: _type = "Task" [ 835.724559] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.742085] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f02052-2969-39b7-3b54-13338f2310dc, 'name': SearchDatastore_Task} progress is 0%. 
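The inventory payloads exchanged with placement in the records above (VCPU, MEMORY_MB, DISK_GB, each with total/reserved/allocation_ratio) determine usable capacity. Assuming placement's usual capacity rule of (total - reserved) * allocation_ratio, the figures in this log work out as follows (illustrative arithmetic, not placement code):

    def usable_capacity(total, reserved, allocation_ratio):
        return (total - reserved) * allocation_ratio

    # VCPU:      (48 - 0)       * 4.0 = 192.0 schedulable vCPUs
    # MEMORY_MB: (196590 - 512) * 1.0 = 196078.0 MB
    # DISK_GB:   (400 - 0)      * 1.0 = 400.0 GB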
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.778809] env[62204]: DEBUG nova.compute.manager [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 835.809018] env[62204]: DEBUG nova.virt.hardware [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 835.809600] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa1143c-1beb-4f25-be1e-86d883975529 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.818835] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684cdb0a-3d8c-4b72-ad9e-90edd44e7675 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.838364] env[62204]: DEBUG nova.network.neutron [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Successfully updated port: d5dbefd9-695f-4a85-859d-9c9d8cb6632b {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 835.899194] env[62204]: DEBUG oslo_concurrency.lockutils [req-40dd73ca-9d9f-4d8a-9e63-3e28694da3d6 req-f8e6ce38-5e50-4f7b-90dd-ac00aa63a065 service nova] Releasing lock "refresh_cache-2b728904-19ef-4773-9260-c615da522801" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.923668] env[62204]: DEBUG oslo_concurrency.lockutils [None req-8d063eac-aa4f-4a4e-a817-04f89c9e788d tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.227s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.940428] env[62204]: ERROR nova.scheduler.client.report [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [req-646cce38-84f3-4a75-84b1-93d98d21c0e6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92e8f362-5134-40c6-9a5c-0b8f64197972. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-646cce38-84f3-4a75-84b1-93d98d21c0e6"}]} [ 835.964155] env[62204]: DEBUG nova.scheduler.client.report [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Refreshing inventories for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 835.981592] env[62204]: DEBUG nova.compute.manager [req-3c76271c-17ed-433f-9490-f446842f9397 req-07894660-53f3-44db-baaf-3ce1fdb25483 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Received event network-vif-plugged-d5dbefd9-695f-4a85-859d-9c9d8cb6632b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.981823] env[62204]: DEBUG oslo_concurrency.lockutils [req-3c76271c-17ed-433f-9490-f446842f9397 req-07894660-53f3-44db-baaf-3ce1fdb25483 service nova] Acquiring lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.982704] env[62204]: DEBUG oslo_concurrency.lockutils [req-3c76271c-17ed-433f-9490-f446842f9397 req-07894660-53f3-44db-baaf-3ce1fdb25483 service nova] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.982704] env[62204]: DEBUG oslo_concurrency.lockutils [req-3c76271c-17ed-433f-9490-f446842f9397 req-07894660-53f3-44db-baaf-3ce1fdb25483 service nova] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.982704] env[62204]: DEBUG nova.compute.manager [req-3c76271c-17ed-433f-9490-f446842f9397 req-07894660-53f3-44db-baaf-3ce1fdb25483 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] No waiting events found dispatching network-vif-plugged-d5dbefd9-695f-4a85-859d-9c9d8cb6632b {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 835.982704] env[62204]: WARNING nova.compute.manager [req-3c76271c-17ed-433f-9490-f446842f9397 req-07894660-53f3-44db-baaf-3ce1fdb25483 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Received unexpected event network-vif-plugged-d5dbefd9-695f-4a85-859d-9c9d8cb6632b for instance with vm_state building and task_state spawning. 
[ 835.983848] env[62204]: DEBUG nova.scheduler.client.report [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Updating ProviderTree inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 835.984095] env[62204]: DEBUG nova.compute.provider_tree [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 836.001748] env[62204]: DEBUG nova.scheduler.client.report [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Refreshing aggregate associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, aggregates: None {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 836.023484] env[62204]: DEBUG nova.scheduler.client.report [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Refreshing trait associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 836.247178] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f02052-2969-39b7-3b54-13338f2310dc, 'name': SearchDatastore_Task, 'duration_secs': 0.011545} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.247438] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.248133] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2b728904-19ef-4773-9260-c615da522801/2b728904-19ef-4773-9260-c615da522801.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 836.249701] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34bcd344-1cac-435b-9434-f9b63c76164f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.262877] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 836.262877] env[62204]: value = "task-1199828" [ 836.262877] env[62204]: _type = "Task" [ 836.262877] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.280808] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199828, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.495050] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c96a8e-c619-41e6-950f-45c775523024 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.505891] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0735dd1-533a-45f9-b0f4-d8d19d091159 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.545302] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1544c3e1-d881-4042-83a5-538b75248454 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.557331] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e54ea3-70c4-41e3-b441-fc77fed1b2df {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.576663] env[62204]: DEBUG nova.compute.provider_tree [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 836.775838] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199828, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470693} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.776153] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2b728904-19ef-4773-9260-c615da522801/2b728904-19ef-4773-9260-c615da522801.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.776637] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.776637] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9aa1c745-79a1-4b9d-a11e-28832d9a043e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.784916] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 836.784916] env[62204]: value = "task-1199829" [ 836.784916] env[62204]: _type = "Task" [ 836.784916] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.794664] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199829, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.142040] env[62204]: DEBUG nova.scheduler.client.report [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Updated inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with generation 94 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 837.142040] env[62204]: DEBUG nova.compute.provider_tree [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Updating resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 generation from 94 to 95 during operation: update_inventory {{(pid=62204) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 837.142040] env[62204]: DEBUG nova.compute.provider_tree [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 837.298303] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199829, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070496} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.302102] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 837.302102] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abafa974-400a-43ef-af1f-284236dfc4c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.325516] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 2b728904-19ef-4773-9260-c615da522801/2b728904-19ef-4773-9260-c615da522801.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.328119] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fed6fdda-b847-4658-8c2e-1ac6b1795bc6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.351329] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 837.351329] env[62204]: value = "task-1199830" [ 837.351329] env[62204]: _type = "Task" [ 837.351329] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.353655] env[62204]: DEBUG nova.network.neutron [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Successfully updated port: 71f7fdd0-1ed7-463d-86be-055c23851225 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 837.363793] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199830, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.652714] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.905s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.655016] env[62204]: DEBUG nova.compute.manager [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 837.656951] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.949s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.659480] env[62204]: INFO nova.compute.claims [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.862109] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "refresh_cache-7412d7ef-b370-4253-8d57-d2bd5d06d6a9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.862410] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquired lock "refresh_cache-7412d7ef-b370-4253-8d57-d2bd5d06d6a9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.862410] env[62204]: DEBUG nova.network.neutron [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 837.863650] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199830, 'name': ReconfigVM_Task, 'duration_secs': 0.296256} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.864171] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 2b728904-19ef-4773-9260-c615da522801/2b728904-19ef-4773-9260-c615da522801.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.868477] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e4239302-7f28-477f-a4f7-cf3601477946 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.873682] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 837.873682] env[62204]: value = "task-1199831" [ 837.873682] env[62204]: _type = "Task" [ 837.873682] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.885460] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199831, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.168425] env[62204]: DEBUG nova.compute.utils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 838.170174] env[62204]: DEBUG nova.compute.manager [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 838.170669] env[62204]: DEBUG nova.network.neutron [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 838.201658] env[62204]: DEBUG nova.compute.manager [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Received event network-changed-d5dbefd9-695f-4a85-859d-9c9d8cb6632b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.201658] env[62204]: DEBUG nova.compute.manager [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Refreshing instance network info cache due to event network-changed-d5dbefd9-695f-4a85-859d-9c9d8cb6632b. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 838.201658] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Acquiring lock "refresh_cache-a2a37a1b-3ef0-4be7-924c-66c7a1583b68" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.201658] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Acquired lock "refresh_cache-a2a37a1b-3ef0-4be7-924c-66c7a1583b68" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.201658] env[62204]: DEBUG nova.network.neutron [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Refreshing network info cache for port d5dbefd9-695f-4a85-859d-9c9d8cb6632b {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 838.320985] env[62204]: DEBUG nova.policy [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7ef4b220ef748138fa00b468bd2652c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9d5a571a94f443f5b9d26a4cde11a86c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 838.388635] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199831, 'name': Rename_Task, 'duration_secs': 0.165481} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.388635] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.388635] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19d33523-b6ca-4163-8e6a-f32af0c5e6d3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.396515] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 838.396515] env[62204]: value = "task-1199832" [ 838.396515] env[62204]: _type = "Task" [ 838.396515] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.401583] env[62204]: DEBUG nova.network.neutron [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.413022] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199832, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.538325] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquiring lock "f445a8ea-ff21-44e9-8389-231a03c51650" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.538325] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "f445a8ea-ff21-44e9-8389-231a03c51650" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.556246] env[62204]: DEBUG nova.network.neutron [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Updating instance_info_cache with network_info: [{"id": "71f7fdd0-1ed7-463d-86be-055c23851225", "address": "fa:16:3e:44:17:fc", "network": {"id": "c859c0fe-cab5-4cc8-b844-0795096eefbc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2fe8616669064856bebe874898c69d6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfbfc55d-8126-40dd-998e-8600ea92f97c", "external-id": "nsx-vlan-transportzone-650", "segmentation_id": 650, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71f7fdd0-1e", "ovs_interfaceid": "71f7fdd0-1ed7-463d-86be-055c23851225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.645057] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring 
lock "25563dec-7e4d-42d9-b922-0b2354b5d70e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.645342] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.645558] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "25563dec-7e4d-42d9-b922-0b2354b5d70e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.645778] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.645993] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.648584] env[62204]: INFO nova.compute.manager [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Terminating instance [ 838.650422] env[62204]: DEBUG nova.compute.manager [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 838.650631] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.652845] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc6cbf5-2f6e-43c3-96f5-71a70fdad70a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.661249] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.662312] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-360496a7-ee37-425d-86df-362eb7b13327 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.670908] env[62204]: DEBUG nova.compute.manager [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 838.673552] env[62204]: DEBUG oslo_vmware.api [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 838.673552] env[62204]: value = "task-1199833" [ 838.673552] env[62204]: _type = "Task" [ 838.673552] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.685847] env[62204]: DEBUG oslo_vmware.api [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199833, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.745041] env[62204]: DEBUG nova.network.neutron [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.747119] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.747364] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.747544] env[62204]: DEBUG nova.compute.manager [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 838.748544] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e56d1ad-9df4-4cde-8591-2d899268efb4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.765141] env[62204]: DEBUG nova.compute.manager [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62204) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 838.765141] env[62204]: DEBUG nova.objects.instance [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'flavor' on Instance uuid 0a4a432d-a71a-4da7-be90-25dcec5a64c6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 838.841095] env[62204]: DEBUG nova.network.neutron [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.914408] env[62204]: DEBUG oslo_vmware.api [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199832, 'name': PowerOnVM_Task, 'duration_secs': 0.482986} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.918188] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.918391] env[62204]: INFO nova.compute.manager [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Took 8.38 seconds to spawn the instance on the hypervisor. [ 838.918574] env[62204]: DEBUG nova.compute.manager [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 838.920153] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10accb2-89ac-49bd-984e-7fdeef80fde6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.005189] env[62204]: DEBUG nova.network.neutron [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Successfully updated port: 0c5b107a-29a0-425e-81cd-4dae8593ec01 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.064897] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Releasing lock "refresh_cache-7412d7ef-b370-4253-8d57-d2bd5d06d6a9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.065565] env[62204]: DEBUG nova.compute.manager [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Instance network_info: |[{"id": "71f7fdd0-1ed7-463d-86be-055c23851225", "address": "fa:16:3e:44:17:fc", "network": {"id": "c859c0fe-cab5-4cc8-b844-0795096eefbc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2fe8616669064856bebe874898c69d6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfbfc55d-8126-40dd-998e-8600ea92f97c", "external-id": "nsx-vlan-transportzone-650", "segmentation_id": 650, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71f7fdd0-1e", "ovs_interfaceid": "71f7fdd0-1ed7-463d-86be-055c23851225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 839.065852] env[62204]: INFO nova.compute.manager [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Rescuing [ 839.066025] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.066216] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.066347] env[62204]: DEBUG nova.network.neutron [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 839.071958] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:17:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfbfc55d-8126-40dd-998e-8600ea92f97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71f7fdd0-1ed7-463d-86be-055c23851225', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 839.084018] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Creating folder: Project (9d5a571a94f443f5b9d26a4cde11a86c). Parent ref: group-v259933. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.084018] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4b6475b-a010-4d42-a2be-a0fe5dc17a5b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.831097] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Releasing lock "refresh_cache-a2a37a1b-3ef0-4be7-924c-66c7a1583b68" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.831398] env[62204]: DEBUG nova.compute.manager [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Received event network-changed-21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 839.831541] env[62204]: DEBUG nova.compute.manager [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Refreshing instance network info cache due to event network-changed-21c10daf-76af-4fd9-8681-58fdf9ea566f. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 839.831746] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Acquiring lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.831887] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Acquired lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.832062] env[62204]: DEBUG nova.network.neutron [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Refreshing network info cache for port 21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 839.837270] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "refresh_cache-a2a37a1b-3ef0-4be7-924c-66c7a1583b68" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.841024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquired lock "refresh_cache-a2a37a1b-3ef0-4be7-924c-66c7a1583b68" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.841024] env[62204]: DEBUG nova.network.neutron [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 839.844554] 
env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.846461] env[62204]: DEBUG nova.network.neutron [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Successfully created port: 71d3f404-a411-4f5e-93a0-ceb7817ec80b {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 839.856479] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78969d91-fcc3-47b0-b773-590b2759fcb7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.858399] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Created folder: Project (9d5a571a94f443f5b9d26a4cde11a86c) in parent group-v259933. [ 839.858588] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Creating folder: Instances. Parent ref: group-v260055. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 839.860101] env[62204]: INFO nova.compute.manager [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Took 38.02 seconds to build instance. [ 839.862530] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a62e6fc9-0d8f-4d39-9279-f78a7a048794 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.877612] env[62204]: DEBUG oslo_vmware.api [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199833, 'name': PowerOffVM_Task, 'duration_secs': 0.343467} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.878466] env[62204]: DEBUG oslo_vmware.api [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 839.878466] env[62204]: value = "task-1199835" [ 839.878466] env[62204]: _type = "Task" [ 839.878466] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.879827] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.880130] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.884541] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b89e30f8-5f97-4103-96d9-881e05a2ac31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.890088] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Created folder: Instances in parent group-v260055. [ 839.890088] env[62204]: DEBUG oslo.service.loopingcall [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 839.890534] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 839.891687] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-43a2e8ba-3902-4827-b29c-02d29d619934 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.913545] env[62204]: DEBUG oslo_vmware.api [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199835, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.920607] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 839.920607] env[62204]: value = "task-1199838" [ 839.920607] env[62204]: _type = "Task" [ 839.920607] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.940739] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199838, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.991775] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.991775] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.991775] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Deleting the datastore file [datastore2] 25563dec-7e4d-42d9-b922-0b2354b5d70e {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.991775] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b067fdf-8006-4d03-82a6-94d406ab0ad2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.994554] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a625457-5da2-4759-864f-a5f20bb304d3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.006182] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd2b958-cc2a-4fde-8119-c665fcda477f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.012137] env[62204]: DEBUG oslo_vmware.api [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 840.012137] env[62204]: value = "task-1199839" [ 840.012137] env[62204]: _type = "Task" [ 840.012137] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.049044] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7281d048-fc61-466e-a702-b4889c2684a7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.051716] env[62204]: DEBUG oslo_vmware.api [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199839, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.060017] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79f96f1-0bff-4836-b4b8-10d5aa4c587f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.071925] env[62204]: DEBUG nova.compute.provider_tree [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.352357] env[62204]: DEBUG nova.compute.manager [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 840.361573] env[62204]: INFO nova.compute.manager [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Rescuing [ 840.361573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "refresh_cache-2b728904-19ef-4773-9260-c615da522801" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.361573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "refresh_cache-2b728904-19ef-4773-9260-c615da522801" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.361745] env[62204]: DEBUG nova.network.neutron [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 840.366825] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b687088f-d70c-419a-9981-3cfcd4f0f9df tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "2b728904-19ef-4773-9260-c615da522801" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.151s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.399039] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.399175] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.399443] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.399583] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.400498] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.400498] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.400498] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 840.400498] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.400498] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.400873] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 840.400873] env[62204]: DEBUG nova.virt.hardware [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.403928] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c994678-2e81-4049-8bbc-c86913b3e684 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.410833] env[62204]: DEBUG nova.compute.manager [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Received event network-vif-plugged-0c5b107a-29a0-425e-81cd-4dae8593ec01 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 840.411072] env[62204]: DEBUG oslo_concurrency.lockutils [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] Acquiring lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.411289] env[62204]: DEBUG oslo_concurrency.lockutils [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.411458] env[62204]: DEBUG oslo_concurrency.lockutils [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.411867] env[62204]: DEBUG nova.compute.manager [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] No waiting events found dispatching network-vif-plugged-0c5b107a-29a0-425e-81cd-4dae8593ec01 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 840.411867] env[62204]: WARNING nova.compute.manager [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Received unexpected event network-vif-plugged-0c5b107a-29a0-425e-81cd-4dae8593ec01 for instance with vm_state building and task_state spawning. 
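The nova.virt.hardware lines above trace how the driver picks a guest CPU topology for the 1-vCPU m1.nano flavor: with no flavor or image preferences (0:0:0) and effectively unbounded maxima (65536 sockets/cores/threads), the only factorization of one vCPU is sockets=1, cores=1, threads=1, which is the single VirtCPUTopology the log reports. A minimal, hypothetical sketch of that enumeration step (illustrative only; the real selection in nova/virt/hardware.py also weighs preferences and NUMA constraints):

```python
from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product is vcpus.

    Illustrative sketch only -- Nova's actual logic also honours flavor/image
    preferences and NUMA constraints before sorting the candidates.
    """
    limit = lambda m: range(1, min(vcpus, m) + 1)
    return [(s, c, t)
            for s, c, t in product(limit(max_sockets), limit(max_cores), limit(max_threads))
            if s * c * t == vcpus]

print(possible_cpu_topologies(1))  # [(1, 1, 1)] -- the single topology reported above
```

For vcpus=1 this yields exactly one candidate, matching the "Got 1 possible topologies" and "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" lines.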
[ 840.411981] env[62204]: DEBUG nova.compute.manager [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Received event network-changed-0c5b107a-29a0-425e-81cd-4dae8593ec01 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 840.416019] env[62204]: DEBUG nova.compute.manager [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Refreshing instance network info cache due to event network-changed-0c5b107a-29a0-425e-81cd-4dae8593ec01. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 840.416019] env[62204]: DEBUG oslo_concurrency.lockutils [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] Acquiring lock "refresh_cache-a2a37a1b-3ef0-4be7-924c-66c7a1583b68" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.416019] env[62204]: DEBUG oslo_vmware.api [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199835, 'name': PowerOffVM_Task, 'duration_secs': 0.287164} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.416019] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 840.416019] env[62204]: DEBUG nova.compute.manager [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 840.416019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8beaf4c-7b68-43d1-a323-2fb2e1a103e9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.421386] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb2c88a-96ab-45ee-b5c0-adb017244be9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.444865] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199838, 'name': CreateVM_Task, 'duration_secs': 0.367978} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.445071] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 840.445824] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.446019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.446350] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 840.447212] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37a4f28c-bd82-4026-aaac-dc2626257543 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.451781] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 840.451781] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528163fa-fbbc-b3a3-3801-8b2fc2d296f2" [ 840.451781] env[62204]: _type = "Task" [ 840.451781] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.455014] env[62204]: DEBUG nova.network.neutron [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.461693] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528163fa-fbbc-b3a3-3801-8b2fc2d296f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.499546] env[62204]: DEBUG nova.network.neutron [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Updating instance_info_cache with network_info: [{"id": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "address": "fa:16:3e:32:08:82", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af1ae4e-3a", "ovs_interfaceid": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.522179] env[62204]: DEBUG oslo_vmware.api [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176226} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.524338] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 840.524581] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 840.524802] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 840.525038] env[62204]: INFO nova.compute.manager [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Took 1.87 seconds to destroy the instance on the hypervisor. 
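The DeleteDatastoreFile_Task sequence above follows the same shape as every other vSphere task in this log: invoke the *_Task method, get back a task reference (here task-1199839), poll its progress ("progress is 0%") until it completes, and record the elapsed time as duration_secs. A minimal sketch of such a poll loop, assuming a hypothetical poll_progress callable that stands in for the PropertyCollector.RetrievePropertiesEx round-trips shown in the log (this is not oslo_vmware's implementation, just the pattern):

```python
import time

def wait_for_vsphere_task(poll_progress, interval=0.5, timeout=300.0):
    """Poll a long-running vSphere task until it finishes.

    poll_progress is a hypothetical callable returning (state, percent); it
    stands in for the property-collector round-trips that produce the
    "progress is N%" lines in the log.
    """
    start = time.monotonic()
    while True:
        state, percent = poll_progress()
        if state == "success":
            return time.monotonic() - start   # analogous to duration_secs above
        if state == "error":
            raise RuntimeError("task failed at %d%%" % percent)
        if time.monotonic() - start > timeout:
            raise TimeoutError("task still at %d%% after %.0fs" % (percent, timeout))
        time.sleep(interval)                  # cf. the driver's task poll interval
```

The same invoke/poll/complete pattern recurs below for CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task and PowerOffVM_Task.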
[ 840.525319] env[62204]: DEBUG oslo.service.loopingcall [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.525581] env[62204]: DEBUG nova.compute.manager [-] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 840.525705] env[62204]: DEBUG nova.network.neutron [-] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 840.575087] env[62204]: DEBUG nova.scheduler.client.report [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 840.718778] env[62204]: DEBUG nova.network.neutron [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Updated VIF entry in instance network info cache for port 21c10daf-76af-4fd9-8681-58fdf9ea566f. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 840.719283] env[62204]: DEBUG nova.network.neutron [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Updating instance_info_cache with network_info: [{"id": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "address": "fa:16:3e:df:a6:1a", "network": {"id": "438a254f-30fa-4c6f-842b-91f349c68284", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-2114608063-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "286b300e98e244eb8693bb0f3174c121", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21c10daf-76", "ovs_interfaceid": "21c10daf-76af-4fd9-8681-58fdf9ea566f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.849967] env[62204]: DEBUG nova.network.neutron [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Updating instance_info_cache with network_info: [{"id": "d5dbefd9-695f-4a85-859d-9c9d8cb6632b", "address": "fa:16:3e:1d:82:b0", "network": {"id": "1a0a11ef-a798-4089-bac5-1fa4ae98ecd9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1295231842", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d903c404-a23a-40c0-a217-96d4bb2e5b08", "external-id": "nsx-vlan-transportzone-228", "segmentation_id": 228, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5dbefd9-69", "ovs_interfaceid": "d5dbefd9-695f-4a85-859d-9c9d8cb6632b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0c5b107a-29a0-425e-81cd-4dae8593ec01", "address": "fa:16:3e:20:82:80", "network": {"id": "f5131c66-1079-42b3-af97-2dd5d171b3ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37756722", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.95", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c5b107a-29", "ovs_interfaceid": "0c5b107a-29a0-425e-81cd-4dae8593ec01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.869485] env[62204]: DEBUG nova.compute.manager [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 840.939512] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9c131a45-e456-4be2-8cfa-28c75af3f77f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.192s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.966666] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528163fa-fbbc-b3a3-3801-8b2fc2d296f2, 'name': SearchDatastore_Task, 'duration_secs': 0.025872} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.967315] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.967587] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 840.967674] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.967917] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.968160] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 840.968481] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca7e486a-58e8-4623-b1a0-c98c5b21b3e5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.980942] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 840.981207] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 840.981964] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43eced00-f1e4-4ec5-bed7-330c0913127e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.987971] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 840.987971] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c83f7a-f0fe-52bd-71ec-380e931023b8" [ 840.987971] env[62204]: _type = "Task" [ 840.987971] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.996612] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c83f7a-f0fe-52bd-71ec-380e931023b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.002288] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.082883] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.426s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.083992] env[62204]: DEBUG nova.compute.manager [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 841.086227] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.452s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.088086] env[62204]: INFO nova.compute.claims [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.106749] env[62204]: DEBUG nova.network.neutron [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Updating instance_info_cache with network_info: [{"id": "07d4d455-7847-4302-9485-d456629057e7", "address": "fa:16:3e:17:5f:22", "network": {"id": "9b2eaa21-fa75-417b-8af1-754e25729d68", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1173392259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ec87b51786754b05aa75abb818bdbc15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d4d455-78", "ovs_interfaceid": "07d4d455-7847-4302-9485-d456629057e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.222445] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Releasing lock "refresh_cache-25563dec-7e4d-42d9-b922-0b2354b5d70e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.222709] env[62204]: DEBUG nova.compute.manager [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Received event network-vif-plugged-71f7fdd0-1ed7-463d-86be-055c23851225 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.222918] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Acquiring lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.223149] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 
req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.223321] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.223866] env[62204]: DEBUG nova.compute.manager [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] No waiting events found dispatching network-vif-plugged-71f7fdd0-1ed7-463d-86be-055c23851225 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 841.224178] env[62204]: WARNING nova.compute.manager [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Received unexpected event network-vif-plugged-71f7fdd0-1ed7-463d-86be-055c23851225 for instance with vm_state building and task_state spawning. [ 841.224421] env[62204]: DEBUG nova.compute.manager [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Received event network-changed-71f7fdd0-1ed7-463d-86be-055c23851225 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 841.224665] env[62204]: DEBUG nova.compute.manager [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Refreshing instance network info cache due to event network-changed-71f7fdd0-1ed7-463d-86be-055c23851225. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 841.224942] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Acquiring lock "refresh_cache-7412d7ef-b370-4253-8d57-d2bd5d06d6a9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.225157] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Acquired lock "refresh_cache-7412d7ef-b370-4253-8d57-d2bd5d06d6a9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.225375] env[62204]: DEBUG nova.network.neutron [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Refreshing network info cache for port 71f7fdd0-1ed7-463d-86be-055c23851225 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.353041] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Releasing lock "refresh_cache-a2a37a1b-3ef0-4be7-924c-66c7a1583b68" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.353258] env[62204]: DEBUG nova.compute.manager [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Instance network_info: |[{"id": "d5dbefd9-695f-4a85-859d-9c9d8cb6632b", "address": "fa:16:3e:1d:82:b0", "network": {"id": "1a0a11ef-a798-4089-bac5-1fa4ae98ecd9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1295231842", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d903c404-a23a-40c0-a217-96d4bb2e5b08", "external-id": "nsx-vlan-transportzone-228", "segmentation_id": 228, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5dbefd9-69", "ovs_interfaceid": "d5dbefd9-695f-4a85-859d-9c9d8cb6632b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0c5b107a-29a0-425e-81cd-4dae8593ec01", "address": "fa:16:3e:20:82:80", "network": {"id": "f5131c66-1079-42b3-af97-2dd5d171b3ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37756722", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.95", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c5b107a-29", "ovs_interfaceid": "0c5b107a-29a0-425e-81cd-4dae8593ec01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 841.353604] env[62204]: DEBUG oslo_concurrency.lockutils [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] Acquired lock "refresh_cache-a2a37a1b-3ef0-4be7-924c-66c7a1583b68" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.353852] env[62204]: DEBUG nova.network.neutron [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Refreshing network info cache for port 0c5b107a-29a0-425e-81cd-4dae8593ec01 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.355540] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:82:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd903c404-a23a-40c0-a217-96d4bb2e5b08', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5dbefd9-695f-4a85-859d-9c9d8cb6632b', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:82:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '82ca17df-257e-40e6-9ec9-310ed6f05ccb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0c5b107a-29a0-425e-81cd-4dae8593ec01', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 841.364895] env[62204]: DEBUG oslo.service.loopingcall [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 841.366048] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 841.366339] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78100de4-22e0-4441-82e3-b7007d5bdf96 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.393021] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 841.393021] env[62204]: value = "task-1199840" [ 841.393021] env[62204]: _type = "Task" [ 841.393021] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.402117] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199840, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.404468] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.500879] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c83f7a-f0fe-52bd-71ec-380e931023b8, 'name': SearchDatastore_Task, 'duration_secs': 0.013946} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.501884] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d3403e0-54bb-4b3c-8bf5-438bc2b68026 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.508551] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 841.508551] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524a1f16-2af7-0181-a52b-7fd4b2f1a41a" [ 841.508551] env[62204]: _type = "Task" [ 841.508551] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.512751] env[62204]: DEBUG nova.network.neutron [-] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.520769] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524a1f16-2af7-0181-a52b-7fd4b2f1a41a, 'name': SearchDatastore_Task, 'duration_secs': 0.009524} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.521055] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.521514] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 7412d7ef-b370-4253-8d57-d2bd5d06d6a9/7412d7ef-b370-4253-8d57-d2bd5d06d6a9.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 841.521514] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09c40822-4aea-406a-9c69-696c98a5db65 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.528951] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 841.528951] env[62204]: value = "task-1199841" [ 841.528951] env[62204]: _type = "Task" [ 841.528951] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.537342] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199841, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.541723] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 841.541989] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbe32bd3-c622-46f8-b61c-4d83da6ce0bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.548554] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 841.548554] env[62204]: value = "task-1199842" [ 841.548554] env[62204]: _type = "Task" [ 841.548554] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.557402] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199842, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.595443] env[62204]: DEBUG nova.compute.utils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 841.597439] env[62204]: DEBUG nova.compute.manager [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 841.597628] env[62204]: DEBUG nova.network.neutron [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 841.609118] env[62204]: DEBUG oslo_concurrency.lockutils [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "refresh_cache-2b728904-19ef-4773-9260-c615da522801" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 841.650041] env[62204]: DEBUG nova.policy [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '482111f8541e40cb91ab69a363845043', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7f1dbef99d9946d58fbe59f2850f6c63', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 841.904179] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199840, 'name': CreateVM_Task, 'duration_secs': 0.407609} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.904498] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 841.905526] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.905526] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.906044] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 841.906365] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed45e2c3-8dc9-47b2-b480-4a099fa4e005 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.912120] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 841.912120] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5227155d-6c27-1bea-a6b2-f1fbe0b4e7bb" [ 841.912120] env[62204]: _type = "Task" [ 841.912120] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.922216] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5227155d-6c27-1bea-a6b2-f1fbe0b4e7bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.016705] env[62204]: INFO nova.compute.manager [-] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Took 1.49 seconds to deallocate network for instance. [ 842.040328] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199841, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.063340] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199842, 'name': PowerOffVM_Task, 'duration_secs': 0.195879} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.063834] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 842.064831] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84dee44-d060-4d23-a97c-b9110eff7a68 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.093898] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e78424-6ef4-4b34-a9a1-e14cd1ba6351 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.103325] env[62204]: DEBUG nova.compute.manager [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 842.152728] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 842.155149] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f182bace-0424-41ff-8685-680e7bd51933 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.161288] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 842.163037] env[62204]: DEBUG nova.network.neutron [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Successfully created port: fd58bbd0-f78f-48e6-9f55-445d86153c6e {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.165484] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddf10f3f-adb7-4c58-b11b-2025fcb27367 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.176117] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 
tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 842.176117] env[62204]: value = "task-1199843" [ 842.176117] env[62204]: _type = "Task" [ 842.176117] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.180468] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 842.180468] env[62204]: value = "task-1199844" [ 842.180468] env[62204]: _type = "Task" [ 842.180468] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.192771] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.202035] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 842.202035] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.202035] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.202035] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.202035] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.202035] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb6eb0e4-4db3-4a8c-bf15-c104cfebd90b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.208320] env[62204]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.208532] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.209609] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c89cf2ea-caeb-4a02-af4d-3fe9546eb6d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.221402] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 842.221402] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bb115f-b5fc-02d0-7c84-8bf76f8b4301" [ 842.221402] env[62204]: _type = "Task" [ 842.221402] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.231298] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bb115f-b5fc-02d0-7c84-8bf76f8b4301, 'name': SearchDatastore_Task, 'duration_secs': 0.008517} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.234767] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d479ee26-2913-4bed-9729-a1ba3deb291b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.240894] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 842.240894] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527f932b-1cee-4e82-ad43-bf01e8ce3d3c" [ 842.240894] env[62204]: _type = "Task" [ 842.240894] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.249067] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527f932b-1cee-4e82-ad43-bf01e8ce3d3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.374950] env[62204]: DEBUG nova.network.neutron [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Updated VIF entry in instance network info cache for port 71f7fdd0-1ed7-463d-86be-055c23851225. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 842.375707] env[62204]: DEBUG nova.network.neutron [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Updating instance_info_cache with network_info: [{"id": "71f7fdd0-1ed7-463d-86be-055c23851225", "address": "fa:16:3e:44:17:fc", "network": {"id": "c859c0fe-cab5-4cc8-b844-0795096eefbc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2fe8616669064856bebe874898c69d6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfbfc55d-8126-40dd-998e-8600ea92f97c", "external-id": "nsx-vlan-transportzone-650", "segmentation_id": 650, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71f7fdd0-1e", "ovs_interfaceid": "71f7fdd0-1ed7-463d-86be-055c23851225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.398162] env[62204]: DEBUG nova.network.neutron [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Updated VIF entry in instance network info cache for port 0c5b107a-29a0-425e-81cd-4dae8593ec01. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 842.398817] env[62204]: DEBUG nova.network.neutron [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Updating instance_info_cache with network_info: [{"id": "d5dbefd9-695f-4a85-859d-9c9d8cb6632b", "address": "fa:16:3e:1d:82:b0", "network": {"id": "1a0a11ef-a798-4089-bac5-1fa4ae98ecd9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1295231842", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.36", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d903c404-a23a-40c0-a217-96d4bb2e5b08", "external-id": "nsx-vlan-transportzone-228", "segmentation_id": 228, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5dbefd9-69", "ovs_interfaceid": "d5dbefd9-695f-4a85-859d-9c9d8cb6632b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0c5b107a-29a0-425e-81cd-4dae8593ec01", "address": "fa:16:3e:20:82:80", "network": {"id": "f5131c66-1079-42b3-af97-2dd5d171b3ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37756722", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.95", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c5b107a-29", "ovs_interfaceid": "0c5b107a-29a0-425e-81cd-4dae8593ec01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.425296] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5227155d-6c27-1bea-a6b2-f1fbe0b4e7bb, 'name': SearchDatastore_Task, 'duration_secs': 0.066225} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.425613] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.425875] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.426144] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.426313] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.426473] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.426735] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac6f5fc5-8db1-4c0e-9b33-f3f8820d9fc0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.434821] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.435026] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.435715] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b762dbe6-b8bb-4449-983c-c809030e7c1b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.441625] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 842.441625] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a7f983-d5d0-ec80-fc7d-ae5595fa5854" [ 842.441625] env[62204]: _type = "Task" [ 842.441625] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.453129] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a7f983-d5d0-ec80-fc7d-ae5595fa5854, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.456189] env[62204]: DEBUG nova.network.neutron [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Successfully updated port: 71d3f404-a411-4f5e-93a0-ceb7817ec80b {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.523450] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.547242] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199841, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520143} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.547583] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 7412d7ef-b370-4253-8d57-d2bd5d06d6a9/7412d7ef-b370-4253-8d57-d2bd5d06d6a9.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 842.547867] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.548261] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c481c62-f9c4-41f6-a890-e7aa33b433b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.557745] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 842.557745] env[62204]: value = "task-1199845" [ 842.557745] env[62204]: _type = "Task" [ 842.557745] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.567527] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199845, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.589746] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8389bcfd-3e20-4f16-8e81-67f3f0b307d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.598020] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36bd4311-7912-4db6-a073-86f7af373c6d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.648857] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42643d98-d5f1-438f-afe2-4446ab14c5ef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.656636] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4b0708-0b3f-4122-9378-54d09aa185bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.675648] env[62204]: DEBUG nova.compute.provider_tree [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 842.686738] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199843, 'name': PowerOffVM_Task, 'duration_secs': 0.198375} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.686738] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 842.687381] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9e398f-d8d7-4301-ac51-5332c7c3b0a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.706706] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f9dfe7-1711-4c7f-8f3b-2d63d97f28cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.743151] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 842.743151] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-611e6df1-adfc-4131-af8c-71f85e6574d2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.750891] env[62204]: DEBUG nova.compute.manager [req-88e77cc9-fb88-4ac9-8954-9de25eeaa7ab req-287d6d58-0bbd-450d-9561-4a3ac47d0998 service nova] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Received event network-vif-deleted-21c10daf-76af-4fd9-8681-58fdf9ea566f {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.750891] env[62204]: DEBUG nova.compute.manager [req-88e77cc9-fb88-4ac9-8954-9de25eeaa7ab req-287d6d58-0bbd-450d-9561-4a3ac47d0998 service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Received event network-vif-plugged-71d3f404-a411-4f5e-93a0-ceb7817ec80b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 842.750891] env[62204]: DEBUG oslo_concurrency.lockutils [req-88e77cc9-fb88-4ac9-8954-9de25eeaa7ab req-287d6d58-0bbd-450d-9561-4a3ac47d0998 service nova] Acquiring lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 842.750891] env[62204]: DEBUG oslo_concurrency.lockutils [req-88e77cc9-fb88-4ac9-8954-9de25eeaa7ab req-287d6d58-0bbd-450d-9561-4a3ac47d0998 service nova] Lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.750891] env[62204]: DEBUG oslo_concurrency.lockutils [req-88e77cc9-fb88-4ac9-8954-9de25eeaa7ab req-287d6d58-0bbd-450d-9561-4a3ac47d0998 service nova] Lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.750891] env[62204]: DEBUG nova.compute.manager [req-88e77cc9-fb88-4ac9-8954-9de25eeaa7ab req-287d6d58-0bbd-450d-9561-4a3ac47d0998 service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] No waiting events found dispatching network-vif-plugged-71d3f404-a411-4f5e-93a0-ceb7817ec80b {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 842.750891] env[62204]: WARNING nova.compute.manager [req-88e77cc9-fb88-4ac9-8954-9de25eeaa7ab req-287d6d58-0bbd-450d-9561-4a3ac47d0998 service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Received unexpected event network-vif-plugged-71d3f404-a411-4f5e-93a0-ceb7817ec80b for instance with vm_state building and task_state spawning. [ 842.753231] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 842.753231] env[62204]: value = "task-1199846" [ 842.753231] env[62204]: _type = "Task" [ 842.753231] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.758960] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527f932b-1cee-4e82-ad43-bf01e8ce3d3c, 'name': SearchDatastore_Task, 'duration_secs': 0.008798} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.761771] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.762114] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] f5f0c15f-ae0d-4615-93ab-3203a5d7e090/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. 
{{(pid=62204) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 842.762521] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b7d7f4c-fd82-4977-8279-b7def7bfcd2b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.772858] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 842.773101] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.773355] env[62204]: DEBUG oslo_concurrency.lockutils [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.773506] env[62204]: DEBUG oslo_concurrency.lockutils [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.777020] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.777020] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 842.777020] env[62204]: value = "task-1199847" [ 842.777020] env[62204]: _type = "Task" [ 842.777020] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.777020] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aac71cc9-53ae-4488-a391-e0a51d5d8c98 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.787660] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199847, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.793876] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.794084] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.794801] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dd19e6f-50d5-465c-b55d-a297ce97cc67 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.801094] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 842.801094] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5252d326-9008-4a22-9fd9-8b5a61c4b399" [ 842.801094] env[62204]: _type = "Task" [ 842.801094] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.809551] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5252d326-9008-4a22-9fd9-8b5a61c4b399, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.878134] env[62204]: DEBUG oslo_concurrency.lockutils [req-e4280b29-f44e-4924-823e-99a0d3c93b08 req-8ac18e7e-c547-4a70-846d-6fc2c768222d service nova] Releasing lock "refresh_cache-7412d7ef-b370-4253-8d57-d2bd5d06d6a9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.901560] env[62204]: DEBUG oslo_concurrency.lockutils [req-95a59cf9-ee68-4b08-8ca9-d5b995763a12 req-1ea5ef99-7b88-4720-bf88-afc0663ea084 service nova] Releasing lock "refresh_cache-a2a37a1b-3ef0-4be7-924c-66c7a1583b68" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.954044] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a7f983-d5d0-ec80-fc7d-ae5595fa5854, 'name': SearchDatastore_Task, 'duration_secs': 0.009946} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.954729] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04fed3e2-8618-473a-a47c-10afeb501c15 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.959244] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "refresh_cache-431e7b20-22d8-4742-9c47-cdf9ee08fb32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.959405] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquired lock "refresh_cache-431e7b20-22d8-4742-9c47-cdf9ee08fb32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.959559] env[62204]: DEBUG nova.network.neutron [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 842.962326] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 842.962326] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c8a67e-fb9b-eff7-f064-7ed040686734" [ 842.962326] env[62204]: _type = "Task" [ 842.962326] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.971124] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c8a67e-fb9b-eff7-f064-7ed040686734, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.068364] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199845, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078242} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.068899] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 843.069869] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b6d297-8d06-4bef-8c2b-5c965d017cd9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.093223] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 7412d7ef-b370-4253-8d57-d2bd5d06d6a9/7412d7ef-b370-4253-8d57-d2bd5d06d6a9.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.093571] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0009a7b3-f4b1-4144-8e76-957501ed0eeb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.115179] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 843.115179] env[62204]: value = "task-1199848" [ 843.115179] env[62204]: _type = "Task" [ 843.115179] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.124674] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199848, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.150198] env[62204]: DEBUG nova.compute.manager [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 843.182800] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 843.183203] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 843.183420] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.183690] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 843.184167] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.184445] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 843.184993] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 843.185201] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 843.185392] env[62204]: DEBUG nova.virt.hardware [None 
req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 843.185571] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 843.185754] env[62204]: DEBUG nova.virt.hardware [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 843.189384] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a8fba2-3281-4805-a7f2-9b1bbc0f1ac8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.198665] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d7102b-5a3a-40b0-b299-0e488a554b38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.216856] env[62204]: ERROR nova.scheduler.client.report [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [req-5af89c81-121b-4d63-a1d9-c9cb2698eeeb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92e8f362-5134-40c6-9a5c-0b8f64197972. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5af89c81-121b-4d63-a1d9-c9cb2698eeeb"}]} [ 843.234720] env[62204]: DEBUG nova.scheduler.client.report [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Refreshing inventories for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 843.250476] env[62204]: DEBUG nova.scheduler.client.report [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Updating ProviderTree inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 843.250627] env[62204]: DEBUG nova.compute.provider_tree [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 843.262535] env[62204]: DEBUG nova.scheduler.client.report [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Refreshing aggregate associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, aggregates: None {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 843.281237] env[62204]: DEBUG nova.scheduler.client.report [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Refreshing trait associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 843.286793] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199847, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477072} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.287049] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] f5f0c15f-ae0d-4615-93ab-3203a5d7e090/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. [ 843.287848] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9324e406-5e86-4f72-bd3e-40982873f629 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.315083] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] f5f0c15f-ae0d-4615-93ab-3203a5d7e090/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.323747] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6237ae2-f5e6-4534-bedf-72f64b086acc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.338580] env[62204]: DEBUG nova.compute.manager [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Stashing vm_state: stopped {{(pid=62204) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 843.346081] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5252d326-9008-4a22-9fd9-8b5a61c4b399, 'name': SearchDatastore_Task, 'duration_secs': 0.00944} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.347987] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 843.347987] env[62204]: value = "task-1199849" [ 843.347987] env[62204]: _type = "Task" [ 843.347987] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.348212] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04813d3c-a3a6-4686-b8a7-bb17c7509af0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.359113] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 843.359113] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fb444e-2af8-ff42-f8e7-691599c81f17" [ 843.359113] env[62204]: _type = "Task" [ 843.359113] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.362066] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199849, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.371328] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fb444e-2af8-ff42-f8e7-691599c81f17, 'name': SearchDatastore_Task} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.371600] env[62204]: DEBUG oslo_concurrency.lockutils [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.371858] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2b728904-19ef-4773-9260-c615da522801/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. {{(pid=62204) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 843.372138] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63929c68-b6c1-4d83-bb3c-347455b9d970 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.379056] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 843.379056] env[62204]: value = "task-1199850" [ 843.379056] env[62204]: _type = "Task" [ 843.379056] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.387388] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199850, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.476589] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c8a67e-fb9b-eff7-f064-7ed040686734, 'name': SearchDatastore_Task, 'duration_secs': 0.021501} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.479392] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.479681] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] a2a37a1b-3ef0-4be7-924c-66c7a1583b68/a2a37a1b-3ef0-4be7-924c-66c7a1583b68.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 843.481114] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5576859b-da51-4fba-ba6e-9ef0adbc8333 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.487541] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 843.487541] env[62204]: value = "task-1199851" [ 843.487541] env[62204]: _type = "Task" [ 843.487541] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.498093] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199851, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.524055] env[62204]: DEBUG nova.network.neutron [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 843.635602] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199848, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.830443] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943a968a-870a-4522-91b1-fe3ea1e80137 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.840364] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496c4254-4ae4-44a3-a023-4d0acdb3b16e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.886261] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.893779] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c612402c-7ba2-4e5e-b011-420b836b8b79 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.912193] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b642535-646a-4312-8f1f-02b29a70a7f0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.915492] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199849, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.915821] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457816} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.918727] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2b728904-19ef-4773-9260-c615da522801/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. 
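The entries around this point all follow the same rhythm: a vCenter method such as PowerOffVM_Task, ExtendVirtualDisk_Task, CopyVirtualDisk_Task or ReconfigVM_Task is invoked, the caller logs "Waiting for the task ... to complete", and _poll_task then reports "progress is N%" until the task "completed successfully" with a duration_secs. As a rough illustration of that wait/poll loop only (a minimal sketch, not the oslo.vmware implementation; the poll_progress callable, the state strings, and the interval/timeout parameters are assumptions made for the example):

```python
import time
from typing import Callable, Tuple


def poll_until_done(poll_progress: Callable[[], Tuple[str, int]],
                    interval: float = 0.5,
                    timeout: float = 300.0) -> None:
    """Poll a long-running task until it reports success.

    Illustrative only: mirrors the "Waiting for the task ... to complete",
    "progress is N%", "completed successfully" rhythm in the log entries
    above, not oslo.vmware's wait_for_task. `poll_progress` is an assumed
    callable returning (state, percent_complete).
    """
    deadline = time.monotonic() + timeout
    while True:
        state, percent = poll_progress()
        if state == "success":
            print("Task completed successfully.")
            return
        if state == "error":
            raise RuntimeError("task reported an error")
        print(f"Task progress is {percent}%.")
        if time.monotonic() > deadline:
            raise TimeoutError("timed out waiting for the task to complete")
        time.sleep(interval)
```

In the entries above, the same loop shape covers the numbered task handles (task-1199843 through task-1199852) as well as the session-scoped SearchDatastore_Task identifiers; only the task handle and the reported duration_secs differ from call to call.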
[ 843.920313] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6fd9b4-83f0-40e5-a073-492adef35bb7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.933850] env[62204]: DEBUG nova.compute.provider_tree [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 843.961518] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 2b728904-19ef-4773-9260-c615da522801/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.962417] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffe0420c-1f93-47d5-8dee-7d04b490c03d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.984739] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 843.984739] env[62204]: value = "task-1199852" [ 843.984739] env[62204]: _type = "Task" [ 843.984739] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.998852] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199851, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.002764] env[62204]: DEBUG nova.network.neutron [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Successfully updated port: fd58bbd0-f78f-48e6-9f55-445d86153c6e {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.004125] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199852, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.113414] env[62204]: DEBUG nova.network.neutron [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Updating instance_info_cache with network_info: [{"id": "71d3f404-a411-4f5e-93a0-ceb7817ec80b", "address": "fa:16:3e:4e:bf:0c", "network": {"id": "c859c0fe-cab5-4cc8-b844-0795096eefbc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2fe8616669064856bebe874898c69d6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfbfc55d-8126-40dd-998e-8600ea92f97c", "external-id": "nsx-vlan-transportzone-650", "segmentation_id": 650, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71d3f404-a4", "ovs_interfaceid": "71d3f404-a411-4f5e-93a0-ceb7817ec80b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.132335] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199848, 'name': ReconfigVM_Task, 'duration_secs': 0.664179} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.133515] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 7412d7ef-b370-4253-8d57-d2bd5d06d6a9/7412d7ef-b370-4253-8d57-d2bd5d06d6a9.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.134566] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94ed584f-28cd-4508-a0d9-95d2138f705f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.146901] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 844.146901] env[62204]: value = "task-1199853" [ 844.146901] env[62204]: _type = "Task" [ 844.146901] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.159638] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199853, 'name': Rename_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.210479] env[62204]: DEBUG nova.compute.manager [req-f548b5e7-5bfd-4600-8e89-d0613afb9fd7 req-35b49c8e-514e-436c-9e79-23d09c0ae33e service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Received event network-vif-plugged-fd58bbd0-f78f-48e6-9f55-445d86153c6e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 844.210802] env[62204]: DEBUG oslo_concurrency.lockutils [req-f548b5e7-5bfd-4600-8e89-d0613afb9fd7 req-35b49c8e-514e-436c-9e79-23d09c0ae33e service nova] Acquiring lock "031cb3ff-4a80-4961-a399-de31fc72e65b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 844.211069] env[62204]: DEBUG oslo_concurrency.lockutils [req-f548b5e7-5bfd-4600-8e89-d0613afb9fd7 req-35b49c8e-514e-436c-9e79-23d09c0ae33e service nova] Lock "031cb3ff-4a80-4961-a399-de31fc72e65b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.211559] env[62204]: DEBUG oslo_concurrency.lockutils [req-f548b5e7-5bfd-4600-8e89-d0613afb9fd7 req-35b49c8e-514e-436c-9e79-23d09c0ae33e service nova] Lock "031cb3ff-4a80-4961-a399-de31fc72e65b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.211719] env[62204]: DEBUG nova.compute.manager [req-f548b5e7-5bfd-4600-8e89-d0613afb9fd7 req-35b49c8e-514e-436c-9e79-23d09c0ae33e service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] No waiting events found dispatching network-vif-plugged-fd58bbd0-f78f-48e6-9f55-445d86153c6e {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 844.211884] env[62204]: WARNING nova.compute.manager [req-f548b5e7-5bfd-4600-8e89-d0613afb9fd7 req-35b49c8e-514e-436c-9e79-23d09c0ae33e service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Received unexpected event network-vif-plugged-fd58bbd0-f78f-48e6-9f55-445d86153c6e for instance with vm_state building and task_state spawning. [ 844.395345] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199849, 'name': ReconfigVM_Task, 'duration_secs': 0.559752} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.395651] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Reconfigured VM instance instance-00000044 to attach disk [datastore2] f5f0c15f-ae0d-4615-93ab-3203a5d7e090/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.396602] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b0e79d-c0e9-48de-9543-5466f65dd488 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.420741] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2686c3b-3af3-4945-a731-14a862cf19d2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.435821] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 844.435821] env[62204]: value = "task-1199854" [ 844.435821] env[62204]: _type = "Task" [ 844.435821] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.447144] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199854, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.473263] env[62204]: DEBUG nova.scheduler.client.report [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Updated inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with generation 98 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 844.473557] env[62204]: DEBUG nova.compute.provider_tree [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Updating resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 generation from 98 to 99 during operation: update_inventory {{(pid=62204) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 844.473814] env[62204]: DEBUG nova.compute.provider_tree [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 844.494291] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199852, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.503416] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199851, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.758642} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.503658] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] a2a37a1b-3ef0-4be7-924c-66c7a1583b68/a2a37a1b-3ef0-4be7-924c-66c7a1583b68.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 844.503857] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.504170] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea110d99-c29b-4e7e-80f4-05842d488882 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.506313] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "refresh_cache-031cb3ff-4a80-4961-a399-de31fc72e65b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.506457] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "refresh_cache-031cb3ff-4a80-4961-a399-de31fc72e65b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.506602] env[62204]: DEBUG nova.network.neutron [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 844.512310] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 844.512310] env[62204]: value = "task-1199855" [ 844.512310] env[62204]: _type = "Task" [ 844.512310] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.521280] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199855, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.617196] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Releasing lock "refresh_cache-431e7b20-22d8-4742-9c47-cdf9ee08fb32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.617683] env[62204]: DEBUG nova.compute.manager [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Instance network_info: |[{"id": "71d3f404-a411-4f5e-93a0-ceb7817ec80b", "address": "fa:16:3e:4e:bf:0c", "network": {"id": "c859c0fe-cab5-4cc8-b844-0795096eefbc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2fe8616669064856bebe874898c69d6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfbfc55d-8126-40dd-998e-8600ea92f97c", "external-id": "nsx-vlan-transportzone-650", "segmentation_id": 650, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71d3f404-a4", "ovs_interfaceid": "71d3f404-a411-4f5e-93a0-ceb7817ec80b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 844.618636] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:bf:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfbfc55d-8126-40dd-998e-8600ea92f97c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71d3f404-a411-4f5e-93a0-ceb7817ec80b', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.629023] env[62204]: DEBUG oslo.service.loopingcall [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 844.629308] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.629562] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecfe44e0-a6ee-4ef4-9847-d6518abd642a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.654471] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.654471] env[62204]: value = "task-1199856" [ 844.654471] env[62204]: _type = "Task" [ 844.654471] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.662010] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199853, 'name': Rename_Task, 'duration_secs': 0.16472} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.662811] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.663130] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cccecc0a-bf1b-4a89-a7e3-a6cde26337bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.668148] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199856, 'name': CreateVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.672885] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 844.672885] env[62204]: value = "task-1199857" [ 844.672885] env[62204]: _type = "Task" [ 844.672885] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.685156] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199857, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.861506] env[62204]: DEBUG nova.compute.manager [req-6295542a-371f-42d7-8f8a-805baf71e0e0 req-aaba7833-2ae7-4313-ba88-184cd2596f9d service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Received event network-changed-71d3f404-a411-4f5e-93a0-ceb7817ec80b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 844.861733] env[62204]: DEBUG nova.compute.manager [req-6295542a-371f-42d7-8f8a-805baf71e0e0 req-aaba7833-2ae7-4313-ba88-184cd2596f9d service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Refreshing instance network info cache due to event network-changed-71d3f404-a411-4f5e-93a0-ceb7817ec80b. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 844.861931] env[62204]: DEBUG oslo_concurrency.lockutils [req-6295542a-371f-42d7-8f8a-805baf71e0e0 req-aaba7833-2ae7-4313-ba88-184cd2596f9d service nova] Acquiring lock "refresh_cache-431e7b20-22d8-4742-9c47-cdf9ee08fb32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.862091] env[62204]: DEBUG oslo_concurrency.lockutils [req-6295542a-371f-42d7-8f8a-805baf71e0e0 req-aaba7833-2ae7-4313-ba88-184cd2596f9d service nova] Acquired lock "refresh_cache-431e7b20-22d8-4742-9c47-cdf9ee08fb32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.862258] env[62204]: DEBUG nova.network.neutron [req-6295542a-371f-42d7-8f8a-805baf71e0e0 req-aaba7833-2ae7-4313-ba88-184cd2596f9d service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Refreshing network info cache for port 71d3f404-a411-4f5e-93a0-ceb7817ec80b {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 844.946509] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199854, 'name': ReconfigVM_Task, 'duration_secs': 0.176205} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.946812] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.947150] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56c82984-c35a-4adc-bf69-e3354d9a2daa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.954125] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 844.954125] env[62204]: value = "task-1199858" [ 844.954125] env[62204]: _type = "Task" [ 844.954125] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.961872] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199858, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.979928] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.894s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.980482] env[62204]: DEBUG nova.compute.manager [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 844.983260] env[62204]: DEBUG oslo_concurrency.lockutils [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.056s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.983484] env[62204]: DEBUG nova.objects.instance [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lazy-loading 'resources' on Instance uuid 67ee5c4d-3825-4580-a26e-74eb8da50883 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 844.994308] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199852, 'name': ReconfigVM_Task, 'duration_secs': 0.725602} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.995197] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 2b728904-19ef-4773-9260-c615da522801/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.996520] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4094ca57-d339-47c2-bfa1-7cf9f17529a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.027686] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-670e9710-0ce0-4adb-ad42-601620354fd4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.044058] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199855, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214851} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.045343] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 845.045699] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 845.045699] env[62204]: value = "task-1199859" [ 845.045699] env[62204]: _type = "Task" [ 845.045699] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.046658] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c5ffe3-3539-4059-8a2a-e3017b401f50 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.058506] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199859, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.080760] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] a2a37a1b-3ef0-4be7-924c-66c7a1583b68/a2a37a1b-3ef0-4be7-924c-66c7a1583b68.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 845.081912] env[62204]: DEBUG nova.network.neutron [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 845.084023] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fd14e20-37de-4f66-bdb4-31e45672b14d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.105128] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 845.105128] env[62204]: value = "task-1199860" [ 845.105128] env[62204]: _type = "Task" [ 845.105128] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.114156] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199860, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.164322] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199856, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.185867] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199857, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.366039] env[62204]: DEBUG nova.network.neutron [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Updating instance_info_cache with network_info: [{"id": "fd58bbd0-f78f-48e6-9f55-445d86153c6e", "address": "fa:16:3e:13:df:b5", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd58bbd0-f7", "ovs_interfaceid": "fd58bbd0-f78f-48e6-9f55-445d86153c6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.464954] env[62204]: DEBUG oslo_vmware.api [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199858, 'name': PowerOnVM_Task, 'duration_secs': 0.454123} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.465265] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.469078] env[62204]: DEBUG nova.compute.manager [None req-d4dc3d7e-9a11-44f4-b6fb-d42ec08290f8 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 845.469851] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e55c6b7-751f-423d-93da-01517c4359ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.487501] env[62204]: DEBUG nova.compute.utils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.495241] env[62204]: DEBUG nova.compute.manager [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 845.495337] env[62204]: DEBUG nova.network.neutron [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 845.559865] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199859, 'name': ReconfigVM_Task, 'duration_secs': 0.168023} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.562713] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.563365] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-59920dbd-8966-431f-bb1d-3dff03716e5e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.571224] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 845.571224] env[62204]: value = "task-1199861" [ 845.571224] env[62204]: _type = "Task" [ 845.571224] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.585548] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199861, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.616110] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199860, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.664638] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199856, 'name': CreateVM_Task, 'duration_secs': 0.546038} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.667309] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.668475] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.668680] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.668994] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 845.669275] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d03cb7b-b18c-436f-8c95-72564288f32d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.673922] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 845.673922] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cfdd0b-431c-d8e1-fe36-9b0dc6d248ee" [ 845.673922] env[62204]: _type = "Task" [ 845.673922] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.688339] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cfdd0b-431c-d8e1-fe36-9b0dc6d248ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.692301] env[62204]: DEBUG oslo_vmware.api [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199857, 'name': PowerOnVM_Task, 'duration_secs': 0.606097} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.692604] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.692925] env[62204]: INFO nova.compute.manager [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Took 9.91 seconds to spawn the instance on the hypervisor. [ 845.693126] env[62204]: DEBUG nova.compute.manager [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 845.693884] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633fce2d-c5fe-48a8-bc10-aa8a5333aa8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.741219] env[62204]: DEBUG nova.policy [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f57a0e000a79440489a0009f1b2390e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cc2d3674b2a4fa3806dc0286481368e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 845.788092] env[62204]: DEBUG nova.network.neutron [req-6295542a-371f-42d7-8f8a-805baf71e0e0 req-aaba7833-2ae7-4313-ba88-184cd2596f9d service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Updated VIF entry in instance network info cache for port 71d3f404-a411-4f5e-93a0-ceb7817ec80b. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 845.788493] env[62204]: DEBUG nova.network.neutron [req-6295542a-371f-42d7-8f8a-805baf71e0e0 req-aaba7833-2ae7-4313-ba88-184cd2596f9d service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Updating instance_info_cache with network_info: [{"id": "71d3f404-a411-4f5e-93a0-ceb7817ec80b", "address": "fa:16:3e:4e:bf:0c", "network": {"id": "c859c0fe-cab5-4cc8-b844-0795096eefbc", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "2fe8616669064856bebe874898c69d6a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfbfc55d-8126-40dd-998e-8600ea92f97c", "external-id": "nsx-vlan-transportzone-650", "segmentation_id": 650, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71d3f404-a4", "ovs_interfaceid": "71d3f404-a411-4f5e-93a0-ceb7817ec80b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.873463] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "refresh_cache-031cb3ff-4a80-4961-a399-de31fc72e65b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 845.873905] env[62204]: DEBUG nova.compute.manager [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Instance network_info: |[{"id": "fd58bbd0-f78f-48e6-9f55-445d86153c6e", "address": "fa:16:3e:13:df:b5", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd58bbd0-f7", "ovs_interfaceid": "fd58bbd0-f78f-48e6-9f55-445d86153c6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 845.874671] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:df:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '255460d5-71d4-4bfd-87f1-acc10085db7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd58bbd0-f78f-48e6-9f55-445d86153c6e', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 845.883047] env[62204]: DEBUG oslo.service.loopingcall [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 845.883380] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 845.883658] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d76bc787-d63c-44d4-b3d7-abd35006effc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.909409] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 845.909409] env[62204]: value = "task-1199862" [ 845.909409] env[62204]: _type = "Task" [ 845.909409] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.919799] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199862, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.993748] env[62204]: DEBUG nova.compute.manager [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 846.007674] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed337a9f-9424-4a12-8e37-57449fcf7619 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.018528] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e870e4a0-d51f-4bc0-afa1-f45a98d65330 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.054030] env[62204]: DEBUG nova.network.neutron [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Successfully created port: aae4d007-4d74-4c2c-9d2c-6803c004abe1 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.056148] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fc5880-13d8-4825-8cf3-fc8eb8bb2c73 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.064690] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097b35c2-9956-4df6-9275-b33df9194224 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.080261] env[62204]: DEBUG nova.compute.provider_tree [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.088690] env[62204]: DEBUG oslo_vmware.api [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1199861, 'name': PowerOnVM_Task, 'duration_secs': 0.441609} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.089568] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.092345] env[62204]: DEBUG nova.compute.manager [None req-621d7a18-ba4c-44ad-b8d0-84addb65dd22 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 846.093085] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca43d658-6814-4cf6-88d5-abb81e3dda97 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.117231] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199860, 'name': ReconfigVM_Task, 'duration_secs': 0.676476} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.117622] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Reconfigured VM instance instance-00000048 to attach disk [datastore1] a2a37a1b-3ef0-4be7-924c-66c7a1583b68/a2a37a1b-3ef0-4be7-924c-66c7a1583b68.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.118897] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2fd744c8-db24-47a7-97f4-88a886f6ce53 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.126269] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 846.126269] env[62204]: value = "task-1199863" [ 846.126269] env[62204]: _type = "Task" [ 846.126269] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.135667] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199863, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.184063] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cfdd0b-431c-d8e1-fe36-9b0dc6d248ee, 'name': SearchDatastore_Task, 'duration_secs': 0.011673} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.184468] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.184783] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.185329] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.185329] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.185506] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.185840] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5f8aebc-4207-4576-94a8-9d0160dd9fd5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.196445] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.196445] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Folder [datastore1] devstack-image-cache_base created. 
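The lockutils entries above (Releasing / Acquiring / Acquired around the devstack-image-cache_base vmdk) show the per-image lock that serializes cache population across concurrently spawning instances. A minimal sketch of that pattern follows, assuming oslo.concurrency is available as in the logged deployment; the lock name and the populate step are placeholders, not nova's own code.

```python
# Minimal sketch of the per-image serialization seen above; the lock name and
# populate step are placeholders, not nova's _fetch_image_if_missing.
from oslo_concurrency import lockutils

CACHE_VMDK = "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk"


def fetch_image_if_missing(populate_cache):
    # lockutils.lock() emits similar "Acquiring"/"Acquired"/"Releasing" lock
    # lines at DEBUG, which is what produces the entries in this log.
    with lockutils.lock(CACHE_VMDK):
        populate_cache()


if __name__ == "__main__":
    fetch_image_if_missing(lambda: print("populating image cache under the lock"))
```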
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.196445] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24997673-5e49-442b-8eba-f59e6d5bea64 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.200896] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 846.200896] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52716a0c-f759-b768-b68b-d0c6483c8bfd" [ 846.200896] env[62204]: _type = "Task" [ 846.200896] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.209123] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52716a0c-f759-b768-b68b-d0c6483c8bfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.219733] env[62204]: INFO nova.compute.manager [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Took 36.66 seconds to build instance. [ 846.242191] env[62204]: DEBUG nova.compute.manager [req-67328f41-f5f1-458b-b38e-c03510eb4e13 req-c17b2de1-84b8-4029-a7e3-dcf529645682 service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Received event network-changed-fd58bbd0-f78f-48e6-9f55-445d86153c6e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 846.242191] env[62204]: DEBUG nova.compute.manager [req-67328f41-f5f1-458b-b38e-c03510eb4e13 req-c17b2de1-84b8-4029-a7e3-dcf529645682 service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Refreshing instance network info cache due to event network-changed-fd58bbd0-f78f-48e6-9f55-445d86153c6e. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 846.242191] env[62204]: DEBUG oslo_concurrency.lockutils [req-67328f41-f5f1-458b-b38e-c03510eb4e13 req-c17b2de1-84b8-4029-a7e3-dcf529645682 service nova] Acquiring lock "refresh_cache-031cb3ff-4a80-4961-a399-de31fc72e65b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.242191] env[62204]: DEBUG oslo_concurrency.lockutils [req-67328f41-f5f1-458b-b38e-c03510eb4e13 req-c17b2de1-84b8-4029-a7e3-dcf529645682 service nova] Acquired lock "refresh_cache-031cb3ff-4a80-4961-a399-de31fc72e65b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.242191] env[62204]: DEBUG nova.network.neutron [req-67328f41-f5f1-458b-b38e-c03510eb4e13 req-c17b2de1-84b8-4029-a7e3-dcf529645682 service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Refreshing network info cache for port fd58bbd0-f78f-48e6-9f55-445d86153c6e {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 846.290937] env[62204]: DEBUG oslo_concurrency.lockutils [req-6295542a-371f-42d7-8f8a-805baf71e0e0 req-aaba7833-2ae7-4313-ba88-184cd2596f9d service nova] Releasing lock "refresh_cache-431e7b20-22d8-4742-9c47-cdf9ee08fb32" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.419623] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199862, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.589290] env[62204]: DEBUG nova.scheduler.client.report [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.636614] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199863, 'name': Rename_Task, 'duration_secs': 0.15052} completed successfully. 
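The scheduler report-client entry above dumps the provider inventory (totals, reserved amounts, allocation ratios per resource class). For orientation, a small sketch of how usable capacity follows from those numbers, on the usual Placement convention that capacity = (total - reserved) * allocation_ratio; the dict literal is copied from the log, the helper name is illustrative.

```python
# Inventory data as logged for provider 92e8f362-5134-40c6-9a5c-0b8f64197972.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}


def capacity(inv):
    """Usable capacity per resource class: (total - reserved) * allocation_ratio."""
    return {
        rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
        for rc, v in inv.items()
    }


print(capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```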
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.636913] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.637199] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60e936a9-a6cd-48bc-b1aa-bd882cbb4eb1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.643887] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 846.643887] env[62204]: value = "task-1199864" [ 846.643887] env[62204]: _type = "Task" [ 846.643887] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.651503] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199864, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.712875] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52716a0c-f759-b768-b68b-d0c6483c8bfd, 'name': SearchDatastore_Task, 'duration_secs': 0.009725} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.713751] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7e842b4-99cf-4512-af12-cf2c9a872d01 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.718926] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 846.718926] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5278f80b-90e0-7086-9dff-56ee56c8cc30" [ 846.718926] env[62204]: _type = "Task" [ 846.718926] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.722182] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2be49efe-1154-425d-9617-22865ad45f94 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.039s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.728257] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5278f80b-90e0-7086-9dff-56ee56c8cc30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.920269] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199862, 'name': CreateVM_Task, 'duration_secs': 0.536001} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.920438] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 846.921112] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.921288] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.921602] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 846.922144] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb47c83f-2e23-4fb5-9ca4-71bfe0035072 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.927069] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 846.927069] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523c8f96-36d7-a1e4-3290-c61390613b6e" [ 846.927069] env[62204]: _type = "Task" [ 846.927069] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.934965] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523c8f96-36d7-a1e4-3290-c61390613b6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.006845] env[62204]: DEBUG nova.compute.manager [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 847.038952] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 847.039245] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 847.039411] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.039599] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 847.039752] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.039901] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 847.040147] env[62204]: DEBUG nova.virt.hardware [None 
req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 847.040325] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 847.040498] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 847.040781] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 847.040986] env[62204]: DEBUG nova.virt.hardware [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 847.041863] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1554e473-da09-4187-a043-36dac956505b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.051780] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4173a920-dae3-4d81-a430-f721c69c4ce3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.096404] env[62204]: DEBUG oslo_concurrency.lockutils [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.113s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.099663] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.698s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.102914] env[62204]: INFO nova.compute.claims [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.125367] env[62204]: INFO nova.scheduler.client.report [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 
tempest-ListServersNegativeTestJSON-1469495009-project-member] Deleted allocations for instance 67ee5c4d-3825-4580-a26e-74eb8da50883 [ 847.161978] env[62204]: DEBUG oslo_vmware.api [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199864, 'name': PowerOnVM_Task, 'duration_secs': 0.481791} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.162974] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.163230] env[62204]: INFO nova.compute.manager [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Took 14.03 seconds to spawn the instance on the hypervisor. [ 847.163416] env[62204]: DEBUG nova.compute.manager [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.164241] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f21110-da22-4747-ae86-7e2e1b460da7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.177807] env[62204]: DEBUG nova.network.neutron [req-67328f41-f5f1-458b-b38e-c03510eb4e13 req-c17b2de1-84b8-4029-a7e3-dcf529645682 service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Updated VIF entry in instance network info cache for port fd58bbd0-f78f-48e6-9f55-445d86153c6e. 
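The nova.virt.hardware entries a little earlier walk through CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits or preferences set, the code enumerates factorizations of the vCPU count into sockets x cores x threads and ends up with the single 1:1:1 topology. The sketch below re-derives that enumeration in a simplified form; it ignores the limit checks and preference sorting nova applies and is not the real _get_possible_cpu_topologies.

```python
def possible_topologies(vcpus):
    """All sockets*cores*threads factorizations of a vCPU count.

    Simplified from what the log shows for 1 vCPU (-> 1:1:1); nova also
    applies flavor/image limits and preference-based sorting.
    """
    topos = []
    for sockets in range(1, vcpus + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, vcpus // sockets + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            topos.append((sockets, cores, threads))
    return topos


print(possible_topologies(1))  # [(1, 1, 1)]  -- matches the log
print(possible_topologies(4))  # [(1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), (2, 2, 1), (4, 1, 1)]
```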
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 847.178258] env[62204]: DEBUG nova.network.neutron [req-67328f41-f5f1-458b-b38e-c03510eb4e13 req-c17b2de1-84b8-4029-a7e3-dcf529645682 service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Updating instance_info_cache with network_info: [{"id": "fd58bbd0-f78f-48e6-9f55-445d86153c6e", "address": "fa:16:3e:13:df:b5", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd58bbd0-f7", "ovs_interfaceid": "fd58bbd0-f78f-48e6-9f55-445d86153c6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.224436] env[62204]: DEBUG nova.compute.manager [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 847.238019] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5278f80b-90e0-7086-9dff-56ee56c8cc30, 'name': SearchDatastore_Task, 'duration_secs': 0.012376} completed successfully. 
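The cache-update entry above carries the full network_info blob for port fd58bbd0 (bridge, subnets, fixed IP, OVS binding details). A short sketch of pulling the interesting fields out of such a structure follows; the dict literal is abbreviated from the log and the helper is illustrative, not a nova API.

```python
# Abbreviated from the instance_info_cache entry logged above.
network_info = [{
    "id": "fd58bbd0-f78f-48e6-9f55-445d86153c6e",
    "address": "fa:16:3e:13:df:b5",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.3", "type": "fixed"}],
        }],
    },
    "type": "ovs",
    "devname": "tapfd58bbd0-f7",
    "active": True,
}]


def summarize(vifs):
    """Yield (port id, MAC, fixed IPs, device name) per VIF in a network_info list."""
    for vif in vifs:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]
               if ip.get("type") == "fixed"]
        yield vif["id"], vif["address"], ips, vif["devname"]


for port, mac, ips, dev in summarize(network_info):
    print(port, mac, ips, dev)
```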
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.238019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.238019] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 431e7b20-22d8-4742-9c47-cdf9ee08fb32/431e7b20-22d8-4742-9c47-cdf9ee08fb32.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.238019] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eeb4d8cd-dd7c-4301-bd31-1f6a610c8000 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.245315] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 847.245315] env[62204]: value = "task-1199865" [ 847.245315] env[62204]: _type = "Task" [ 847.245315] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.259084] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199865, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.438348] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523c8f96-36d7-a1e4-3290-c61390613b6e, 'name': SearchDatastore_Task, 'duration_secs': 0.012871} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.438745] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.439065] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.439393] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.439709] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.439904] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 847.440240] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97db83f5-1fe1-4071-9d7b-ecd03a887d3f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.457998] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 847.457998] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Folder [datastore1] devstack-image-cache_base created. 
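Taken together, the vmops/ds_util entries above trace the image-cache flow both instances follow for image c0e4d3a1: take the per-image lock, make sure the cache directory exists, check whether the cached vmdk is already on the datastore, and then copy it into the instance's own folder (the CopyVirtualDisk_Task seen for 431e7b20). The sketch below restates that flow with placeholder callables; every operation here stands in for a FileManager / HostDatastoreBrowser / VirtualDiskManager call and is not nova's _fetch_image_if_missing.

```python
def ensure_image_and_copy(image_id, instance_uuid, datastore):
    """Copy-on-miss flow for the VMDK image cache, as traced in the log.

    `datastore` is assumed to expose mkdir/exists/download/copy stand-ins
    for the vCenter calls the log shows.
    """
    cache_dir = f"devstack-image-cache_base/{image_id}"
    cached_vmdk = f"{cache_dir}/{image_id}.vmdk"
    instance_vmdk = f"{instance_uuid}/{instance_uuid}.vmdk"

    datastore.mkdir("devstack-image-cache_base")   # FileManager.MakeDirectory
    if not datastore.exists(cached_vmdk):          # HostDatastoreBrowser.SearchDatastore_Task
        datastore.download(image_id, cached_vmdk)  # cache miss: fetch the image
    datastore.copy(cached_vmdk, instance_vmdk)     # VirtualDiskManager.CopyVirtualDisk_Task
    return instance_vmdk


class FakeDatastore:
    """Minimal in-memory stand-in so the sketch runs on its own."""

    def __init__(self):
        self.paths = set()

    def mkdir(self, path):
        self.paths.add(path)

    def exists(self, path):
        return path in self.paths

    def download(self, image_id, dest):
        self.paths.add(dest)

    def copy(self, src, dest):
        assert src in self.paths
        self.paths.add(dest)


print(ensure_image_and_copy("c0e4d3a1", "431e7b20", FakeDatastore()))
```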
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 847.458133] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d96c667e-2c48-4853-80ac-185564f1fcca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.463787] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 847.463787] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b56e54-2c2b-5fa8-750e-1ec39b80ea6f" [ 847.463787] env[62204]: _type = "Task" [ 847.463787] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.471437] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b56e54-2c2b-5fa8-750e-1ec39b80ea6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.572669] env[62204]: DEBUG nova.network.neutron [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Successfully updated port: aae4d007-4d74-4c2c-9d2c-6803c004abe1 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 847.615376] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.615659] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.636753] env[62204]: DEBUG oslo_concurrency.lockutils [None req-077f2060-f825-4b24-818f-6a0c9868c2b9 tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "67ee5c4d-3825-4580-a26e-74eb8da50883" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.880s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.681033] env[62204]: DEBUG oslo_concurrency.lockutils [req-67328f41-f5f1-458b-b38e-c03510eb4e13 req-c17b2de1-84b8-4029-a7e3-dcf529645682 service nova] Releasing lock "refresh_cache-031cb3ff-4a80-4961-a399-de31fc72e65b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.685253] env[62204]: INFO nova.compute.manager [None req-e9346825-7697-48ad-af71-6a0133bec1d5 
tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Took 40.44 seconds to build instance. [ 847.752644] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.756355] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199865, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.975418] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b56e54-2c2b-5fa8-750e-1ec39b80ea6f, 'name': SearchDatastore_Task, 'duration_secs': 0.008617} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.976290] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9de2b06b-eee5-4016-8bf3-d9ea8027b693 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.982061] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 847.982061] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5285f1c2-70c3-0579-0e88-0829a21267c8" [ 847.982061] env[62204]: _type = "Task" [ 847.982061] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.989802] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5285f1c2-70c3-0579-0e88-0829a21267c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.075726] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "refresh_cache-98805916-8501-4afb-9e1c-a5393f6e5557" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.076077] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "refresh_cache-98805916-8501-4afb-9e1c-a5393f6e5557" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.076077] env[62204]: DEBUG nova.network.neutron [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.157604] env[62204]: INFO nova.compute.manager [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Unrescuing [ 848.157898] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.158076] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.158261] env[62204]: DEBUG nova.network.neutron [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.187906] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e9346825-7697-48ad-af71-6a0133bec1d5 tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.370s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.258792] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643257} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.259080] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 431e7b20-22d8-4742-9c47-cdf9ee08fb32/431e7b20-22d8-4742-9c47-cdf9ee08fb32.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.259310] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.261871] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e6816fb-b27e-4bff-81e8-f47931131bf1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.267559] env[62204]: DEBUG nova.compute.manager [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Received event network-vif-plugged-aae4d007-4d74-4c2c-9d2c-6803c004abe1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 848.267768] env[62204]: DEBUG oslo_concurrency.lockutils [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] Acquiring lock "98805916-8501-4afb-9e1c-a5393f6e5557-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.268037] env[62204]: DEBUG oslo_concurrency.lockutils [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] Lock "98805916-8501-4afb-9e1c-a5393f6e5557-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.268165] env[62204]: DEBUG oslo_concurrency.lockutils [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] Lock "98805916-8501-4afb-9e1c-a5393f6e5557-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.268332] env[62204]: DEBUG nova.compute.manager [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] No waiting events found dispatching network-vif-plugged-aae4d007-4d74-4c2c-9d2c-6803c004abe1 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 848.268479] env[62204]: WARNING nova.compute.manager [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Received unexpected event network-vif-plugged-aae4d007-4d74-4c2c-9d2c-6803c004abe1 for instance with vm_state building 
and task_state spawning. [ 848.268641] env[62204]: DEBUG nova.compute.manager [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Received event network-changed-aae4d007-4d74-4c2c-9d2c-6803c004abe1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 848.268795] env[62204]: DEBUG nova.compute.manager [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Refreshing instance network info cache due to event network-changed-aae4d007-4d74-4c2c-9d2c-6803c004abe1. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 848.268987] env[62204]: DEBUG oslo_concurrency.lockutils [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] Acquiring lock "refresh_cache-98805916-8501-4afb-9e1c-a5393f6e5557" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.270726] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 848.270726] env[62204]: value = "task-1199866" [ 848.270726] env[62204]: _type = "Task" [ 848.270726] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.281702] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199866, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.494375] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5285f1c2-70c3-0579-0e88-0829a21267c8, 'name': SearchDatastore_Task, 'duration_secs': 0.011961} completed successfully. 
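The manager entries above show the external-event path: Neutron reports network-vif-plugged / network-changed for the port, the compute manager pops any waiter registered for that event under the per-instance "-events" lock, and logs the "unexpected event" warning when nothing was waiting yet (as happened here while the instance was still building). A compact sketch of that pop-or-warn dispatch is below, using a plain dict of waiters; the names are illustrative, not the manager's own.

```python
import threading

# event name -> threading.Event for code paths waiting on it (illustrative).
_waiters: dict[str, threading.Event] = {}
_events_lock = threading.Lock()   # plays the role of the "<uuid>-events" lock


def wait_for(event_name):
    """Register interest in an external event and return its waiter."""
    ev = threading.Event()
    with _events_lock:
        _waiters[event_name] = ev
    return ev


def dispatch(event_name):
    """Pop the waiter for an external event, or warn when none is registered."""
    with _events_lock:
        ev = _waiters.pop(event_name, None)
    if ev is None:
        print(f"WARNING: received unexpected event {event_name}")
        return
    ev.set()


dispatch("network-vif-plugged-aae4d007")           # nobody waiting -> warning
waiter = wait_for("network-vif-plugged-aae4d007")
dispatch("network-vif-plugged-aae4d007")           # waiter released
print(waiter.is_set())                             # True
```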
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.494648] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.494912] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 031cb3ff-4a80-4961-a399-de31fc72e65b/031cb3ff-4a80-4961-a399-de31fc72e65b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 848.495204] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0dedd88-f798-4edb-aae8-84aade0bcd4f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.506097] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 848.506097] env[62204]: value = "task-1199867" [ 848.506097] env[62204]: _type = "Task" [ 848.506097] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.518901] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199867, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.565416] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7820d907-cec8-40f1-a2ba-e44764a9b211 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.573111] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a576c62-4927-45f0-b6d9-238dff4772ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.604543] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdec8587-2092-4c35-8039-b4fee6b61000 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.612378] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7e40ed-56be-451d-a9e2-7e1f29990887 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.625958] env[62204]: DEBUG nova.compute.provider_tree [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.641323] env[62204]: DEBUG nova.network.neutron [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.691936] env[62204]: DEBUG nova.compute.manager [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 848.783770] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080792} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.784121] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.784793] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dceb8267-80ee-4883-bd56-37387c9465f8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.806532] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 431e7b20-22d8-4742-9c47-cdf9ee08fb32/431e7b20-22d8-4742-9c47-cdf9ee08fb32.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.809342] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef16806c-5056-4a83-b0d0-af7de34b0327 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.824152] env[62204]: DEBUG nova.network.neutron [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Updating instance_info_cache with network_info: [{"id": "aae4d007-4d74-4c2c-9d2c-6803c004abe1", "address": "fa:16:3e:c2:f3:e1", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaae4d007-4d", "ovs_interfaceid": "aae4d007-4d74-4c2c-9d2c-6803c004abe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.830542] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 848.830542] env[62204]: value = "task-1199868" [ 848.830542] env[62204]: _type = "Task" [ 848.830542] env[62204]: } to complete. 
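The vmops entries here extend the copied root disk to 1048576 before reconfiguring the VM to attach it; that figure is the flavor's root_gb=1 expressed in KiB, which appears to be the unit ExtendVirtualDisk_Task takes (the unit reading is an assumption based on the numbers, not a documented constant). A one-line check of that conversion:

```python
# m1.nano has root_gb=1 (see the flavor dump earlier in the log); the extend
# target is assumed to be expressed in KiB.
root_gb = 1
new_capacity_kib = root_gb * 1024 * 1024
print(new_capacity_kib)  # 1048576, matching "Extending root virtual disk to 1048576"
```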
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.838543] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199868, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.000324] env[62204]: DEBUG nova.network.neutron [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Updating instance_info_cache with network_info: [{"id": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "address": "fa:16:3e:32:08:82", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af1ae4e-3a", "ovs_interfaceid": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.015586] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199867, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.131026] env[62204]: DEBUG nova.scheduler.client.report [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 849.216683] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.328205] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "refresh_cache-98805916-8501-4afb-9e1c-a5393f6e5557" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.328205] env[62204]: DEBUG nova.compute.manager [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Instance network_info: |[{"id": "aae4d007-4d74-4c2c-9d2c-6803c004abe1", "address": "fa:16:3e:c2:f3:e1", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaae4d007-4d", "ovs_interfaceid": "aae4d007-4d74-4c2c-9d2c-6803c004abe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 849.328205] env[62204]: DEBUG oslo_concurrency.lockutils [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] Acquired lock "refresh_cache-98805916-8501-4afb-9e1c-a5393f6e5557" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.328205] env[62204]: DEBUG nova.network.neutron 
[req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Refreshing network info cache for port aae4d007-4d74-4c2c-9d2c-6803c004abe1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 849.330723] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:f3:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aae4d007-4d74-4c2c-9d2c-6803c004abe1', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.338261] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Creating folder: Project (7cc2d3674b2a4fa3806dc0286481368e). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 849.339197] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c89a9f5-0e23-4b0e-a569-7e55fa787114 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.349223] env[62204]: DEBUG oslo_concurrency.lockutils [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.349393] env[62204]: DEBUG oslo_concurrency.lockutils [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.349680] env[62204]: DEBUG oslo_concurrency.lockutils [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.350075] env[62204]: DEBUG oslo_concurrency.lockutils [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.350313] env[62204]: DEBUG oslo_concurrency.lockutils [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 
tempest-ServersTestMultiNic-1202567639-project-member] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.357162] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199868, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.359154] env[62204]: INFO nova.compute.manager [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Terminating instance [ 849.361285] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Created folder: Project (7cc2d3674b2a4fa3806dc0286481368e) in parent group-v259933. [ 849.361566] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Creating folder: Instances. Parent ref: group-v260061. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 849.362697] env[62204]: DEBUG nova.compute.manager [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 849.362982] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 849.363362] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8afcf8cf-6fc6-40ff-8592-2662812af6ef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.366218] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110f0306-4d7e-4277-8a33-7045392be966 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.376984] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.378529] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32275bca-09a7-4ef0-ba45-984c7eaa7466 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.380505] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Created folder: Instances in parent group-v260061. [ 849.380867] env[62204]: DEBUG oslo.service.loopingcall [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 849.381208] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 849.381800] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87d48b0f-1746-4b3a-becc-7114b4337f0f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.404290] env[62204]: DEBUG oslo_vmware.api [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 849.404290] env[62204]: value = "task-1199871" [ 849.404290] env[62204]: _type = "Task" [ 849.404290] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.414564] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.414564] env[62204]: value = "task-1199872" [ 849.414564] env[62204]: _type = "Task" [ 849.414564] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.426166] env[62204]: DEBUG oslo_vmware.api [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199871, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.430474] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199872, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.503885] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.504698] env[62204]: DEBUG nova.objects.instance [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lazy-loading 'flavor' on Instance uuid f5f0c15f-ae0d-4615-93ab-3203a5d7e090 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.519441] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199867, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.636404] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.637037] env[62204]: DEBUG nova.compute.manager [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 849.640742] env[62204]: DEBUG oslo_concurrency.lockutils [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.589s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.641042] env[62204]: DEBUG nova.objects.instance [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'resources' on Instance uuid 2727dc46-98ed-435d-89ef-41bc20cda776 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.664455] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "1121b1b8-127e-475f-8dfc-de43911de39a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.664732] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1121b1b8-127e-475f-8dfc-de43911de39a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.664956] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "1121b1b8-127e-475f-8dfc-de43911de39a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.665208] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1121b1b8-127e-475f-8dfc-de43911de39a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.665569] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1121b1b8-127e-475f-8dfc-de43911de39a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.667846] env[62204]: INFO nova.compute.manager [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Terminating instance [ 849.669890] env[62204]: DEBUG 
nova.compute.manager [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 849.669890] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 849.674018] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32207fc-ac3e-4f5c-b32d-81423b701d50 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.678920] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.679186] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-242be9af-b809-4bc3-989d-15fe37e5bff9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.685669] env[62204]: DEBUG oslo_vmware.api [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 849.685669] env[62204]: value = "task-1199873" [ 849.685669] env[62204]: _type = "Task" [ 849.685669] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.694211] env[62204]: DEBUG oslo_vmware.api [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.851310] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199868, 'name': ReconfigVM_Task, 'duration_secs': 0.642251} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.851587] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 431e7b20-22d8-4742-9c47-cdf9ee08fb32/431e7b20-22d8-4742-9c47-cdf9ee08fb32.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.852355] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c32e0a5-d774-468b-8dc0-567cb0451a9d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.858515] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 849.858515] env[62204]: value = "task-1199874" [ 849.858515] env[62204]: _type = "Task" [ 849.858515] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.873127] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199874, 'name': Rename_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.912046] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.912350] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.912560] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.912747] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.912914] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.915571] env[62204]: INFO nova.compute.manager [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Terminating instance [ 849.925204] env[62204]: DEBUG oslo_vmware.api [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199871, 'name': PowerOffVM_Task, 'duration_secs': 0.250596} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.925790] env[62204]: DEBUG nova.compute.manager [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 849.926041] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 849.926640] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.926818] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 849.927575] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35c0552-8766-4249-aa01-7fb76e8f5a61 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.930542] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce05238f-e014-4ab8-9190-4215623dff76 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.936342] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199872, 'name': CreateVM_Task, 'duration_secs': 0.440548} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.936844] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 849.937794] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.937993] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.938345] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 849.940533] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b147c67-813d-456b-b98d-76708496821a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.942343] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.942667] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d1414e2-9157-4d25-978e-50609f184f24 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.949586] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 849.949586] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527b24de-9916-4ae8-e1b2-7dcda47dede7" [ 849.949586] env[62204]: _type = "Task" [ 849.949586] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.951133] env[62204]: DEBUG oslo_vmware.api [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 849.951133] env[62204]: value = "task-1199876" [ 849.951133] env[62204]: _type = "Task" [ 849.951133] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.965328] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527b24de-9916-4ae8-e1b2-7dcda47dede7, 'name': SearchDatastore_Task, 'duration_secs': 0.012632} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.968999] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.969325] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.969626] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.969837] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.970103] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.970439] env[62204]: DEBUG oslo_vmware.api [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199876, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.973219] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4eb4feb9-1603-421f-b7cb-f51ab6275379 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.982220] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.982474] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.983297] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93fdd85e-2790-49e1-a0e0-0e65f654ab9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.991574] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 849.991574] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5203ba82-22fe-bbb5-a998-d5ac53aa1ebc" [ 849.991574] env[62204]: _type = "Task" [ 849.991574] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.999628] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5203ba82-22fe-bbb5-a998-d5ac53aa1ebc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.017506] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199867, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.111865} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.020580] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 031cb3ff-4a80-4961-a399-de31fc72e65b/031cb3ff-4a80-4961-a399-de31fc72e65b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.020810] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.021608] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a893a8b-36bc-4f5f-8798-9c4ad60b10b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.024311] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a66963fe-a098-43ec-adf1-9f5fb30d31fc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.045394] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.047177] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f8240a1-811b-428d-9a9b-473ddb14556d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.049220] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 850.049220] env[62204]: value = "task-1199877" [ 850.049220] env[62204]: _type = "Task" [ 850.049220] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.053994] env[62204]: DEBUG oslo_vmware.api [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 850.053994] env[62204]: value = "task-1199878" [ 850.053994] env[62204]: _type = "Task" [ 850.053994] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.061010] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199877, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.069162] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.069461] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.069670] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Deleting the datastore file [datastore1] a2a37a1b-3ef0-4be7-924c-66c7a1583b68 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.069973] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9ad8559-b8d4-4396-8347-dd39f24bccb7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.075337] env[62204]: DEBUG oslo_vmware.api [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199878, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.079746] env[62204]: DEBUG oslo_vmware.api [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for the task: (returnval){ [ 850.079746] env[62204]: value = "task-1199879" [ 850.079746] env[62204]: _type = "Task" [ 850.079746] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.088197] env[62204]: DEBUG oslo_vmware.api [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199879, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.144977] env[62204]: DEBUG nova.compute.utils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 850.146839] env[62204]: DEBUG nova.objects.instance [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'numa_topology' on Instance uuid 2727dc46-98ed-435d-89ef-41bc20cda776 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 850.147824] env[62204]: DEBUG nova.compute.manager [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 850.148017] env[62204]: DEBUG nova.network.neutron [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 850.196492] env[62204]: DEBUG oslo_vmware.api [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199873, 'name': PowerOffVM_Task, 'duration_secs': 0.201707} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.196772] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.196968] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.197272] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b50c5b4-78c6-40b7-aa44-29c500a3e874 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.199938] env[62204]: DEBUG nova.network.neutron [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Updated VIF entry in instance network info cache for port aae4d007-4d74-4c2c-9d2c-6803c004abe1. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 850.200286] env[62204]: DEBUG nova.network.neutron [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Updating instance_info_cache with network_info: [{"id": "aae4d007-4d74-4c2c-9d2c-6803c004abe1", "address": "fa:16:3e:c2:f3:e1", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaae4d007-4d", "ovs_interfaceid": "aae4d007-4d74-4c2c-9d2c-6803c004abe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.214056] env[62204]: DEBUG nova.policy [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b52997d8756d4096b3dcba62f0bd14b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e70013d6da84d2b9a0719621c9f2c1a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 850.293625] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.293625] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.293888] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Deleting the datastore file [datastore1] 1121b1b8-127e-475f-8dfc-de43911de39a {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.294176] env[62204]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-862906d9-b06f-43ba-93f8-ebf6947a91fe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.300916] env[62204]: DEBUG oslo_vmware.api [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 850.300916] env[62204]: value = "task-1199881" [ 850.300916] env[62204]: _type = "Task" [ 850.300916] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.309982] env[62204]: DEBUG oslo_vmware.api [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199881, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.368022] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199874, 'name': Rename_Task, 'duration_secs': 0.189935} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.368022] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.368245] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83859342-7d49-425a-bf37-9f50f84a6829 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.374762] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 850.374762] env[62204]: value = "task-1199882" [ 850.374762] env[62204]: _type = "Task" [ 850.374762] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.382460] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199882, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.464585] env[62204]: DEBUG oslo_vmware.api [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199876, 'name': PowerOffVM_Task, 'duration_secs': 0.21509} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.464878] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.465070] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.465327] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5d0fd11-532b-4568-ba97-c2bce24c6b0b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.508511] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5203ba82-22fe-bbb5-a998-d5ac53aa1ebc, 'name': SearchDatastore_Task, 'duration_secs': 0.011005} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.508794] env[62204]: DEBUG nova.network.neutron [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Successfully created port: 13d59ea2-8671-4e65-a3f0-5839b8e92325 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 850.511507] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ef19a9a-638a-4712-a03e-86ef3ec10358 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.517119] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 850.517119] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a6b6da-baab-ae14-1523-e96b121e266a" [ 850.517119] env[62204]: _type = "Task" [ 850.517119] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.525453] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a6b6da-baab-ae14-1523-e96b121e266a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.560929] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199877, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088949} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.561704] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 850.562582] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17305eea-e621-469b-9993-a8d8e4693c1e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.568583] env[62204]: DEBUG oslo_vmware.api [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199878, 'name': PowerOffVM_Task, 'duration_secs': 0.318002} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.569254] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.574874] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Reconfiguring VM instance instance-00000044 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 850.585438] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6279050b-a7df-48b6-b144-8794f23299aa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.599321] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.600031] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.600031] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Deleting the datastore file [datastore1] 1a1cb81f-383e-48de-8c11-3d5e2c801f40 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.609682] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 
tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 031cb3ff-4a80-4961-a399-de31fc72e65b/031cb3ff-4a80-4961-a399-de31fc72e65b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 850.609682] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-faa11f50-f28d-4a89-ba7c-d605630f9bfd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.613646] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6b6ebb0-b6fe-4c7e-9cbb-74052da91af3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.634463] env[62204]: DEBUG oslo_vmware.api [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Task: {'id': task-1199879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.26296} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.637332] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 850.637537] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 850.637719] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.637919] env[62204]: INFO nova.compute.manager [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Took 1.28 seconds to destroy the instance on the hypervisor. [ 850.638216] env[62204]: DEBUG oslo.service.loopingcall [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 850.638491] env[62204]: DEBUG oslo_vmware.api [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for the task: (returnval){ [ 850.638491] env[62204]: value = "task-1199884" [ 850.638491] env[62204]: _type = "Task" [ 850.638491] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.638717] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 850.638717] env[62204]: value = "task-1199886" [ 850.638717] env[62204]: _type = "Task" [ 850.638717] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.638952] env[62204]: DEBUG oslo_vmware.api [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 850.638952] env[62204]: value = "task-1199885" [ 850.638952] env[62204]: _type = "Task" [ 850.638952] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.639217] env[62204]: DEBUG nova.compute.manager [-] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 850.639318] env[62204]: DEBUG nova.network.neutron [-] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 850.650897] env[62204]: DEBUG nova.objects.base [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Object Instance<2727dc46-98ed-435d-89ef-41bc20cda776> lazy-loaded attributes: resources,numa_topology {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 850.653897] env[62204]: DEBUG nova.compute.manager [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 850.666757] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199886, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.667058] env[62204]: DEBUG oslo_vmware.api [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199885, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.667329] env[62204]: DEBUG oslo_vmware.api [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199884, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.703172] env[62204]: DEBUG oslo_concurrency.lockutils [req-059daff0-95d9-4e8b-aac1-eeb5a993b9db req-6c48faf5-1184-438b-825d-43450e877b12 service nova] Releasing lock "refresh_cache-98805916-8501-4afb-9e1c-a5393f6e5557" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.813148] env[62204]: DEBUG oslo_vmware.api [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199881, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.886302] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199882, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.031396] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a6b6da-baab-ae14-1523-e96b121e266a, 'name': SearchDatastore_Task, 'duration_secs': 0.032491} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.031396] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.031396] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 98805916-8501-4afb-9e1c-a5393f6e5557/98805916-8501-4afb-9e1c-a5393f6e5557.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 851.034036] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5f4618c-3f4d-4f9f-bade-04a1a64983e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.042073] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 851.042073] env[62204]: value = "task-1199887" [ 851.042073] env[62204]: _type = "Task" [ 851.042073] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.050346] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.157522] env[62204]: DEBUG oslo_vmware.api [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39132} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.163431] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.163662] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.163861] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.164092] env[62204]: INFO nova.compute.manager [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Took 1.24 seconds to destroy the instance on the hypervisor. [ 851.164368] env[62204]: DEBUG oslo.service.loopingcall [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.164612] env[62204]: DEBUG oslo_vmware.api [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199885, 'name': ReconfigVM_Task, 'duration_secs': 0.522862} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.164818] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199886, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.165942] env[62204]: DEBUG nova.compute.manager [-] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 851.166078] env[62204]: DEBUG nova.network.neutron [-] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 851.167814] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Reconfigured VM instance instance-00000044 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 851.168040] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.168791] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42502e9d-f9ad-457d-b261-24a46a1b2bb3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.174945] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c498dc7-6072-4c8e-90a3-e025ae38316c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.183873] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1656ae97-4919-4e07-82a2-2b3c3b020105 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.187493] env[62204]: DEBUG oslo_vmware.api [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 851.187493] env[62204]: value = "task-1199888" [ 851.187493] env[62204]: _type = "Task" [ 851.187493] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.196037] env[62204]: DEBUG oslo_vmware.api [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199888, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.224024] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eed6a92-091b-4243-9435-11dca4f41022 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.232628] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3adf58d-8241-4420-8430-8dab3e73c46d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.250440] env[62204]: DEBUG nova.compute.provider_tree [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.312960] env[62204]: DEBUG oslo_vmware.api [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Task: {'id': task-1199881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.589895} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.313180] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 851.313373] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 851.313557] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 851.313737] env[62204]: INFO nova.compute.manager [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Took 1.64 seconds to destroy the instance on the hypervisor. [ 851.314009] env[62204]: DEBUG oslo.service.loopingcall [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.314272] env[62204]: DEBUG nova.compute.manager [-] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 851.314343] env[62204]: DEBUG nova.network.neutron [-] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 851.389932] env[62204]: DEBUG oslo_vmware.api [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199882, 'name': PowerOnVM_Task, 'duration_secs': 0.853304} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.390368] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.390596] env[62204]: INFO nova.compute.manager [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Took 11.04 seconds to spawn the instance on the hypervisor. [ 851.390780] env[62204]: DEBUG nova.compute.manager [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 851.391606] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd17e1e-0dfb-4488-9ccb-b14d3f93e756 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.418146] env[62204]: DEBUG nova.compute.manager [req-de23faa7-bcac-49be-bd90-ff1061c315bb req-0fa65d20-2ed1-4ce1-a77a-95f559980b54 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Received event network-vif-deleted-d5dbefd9-695f-4a85-859d-9c9d8cb6632b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.418620] env[62204]: INFO nova.compute.manager [req-de23faa7-bcac-49be-bd90-ff1061c315bb req-0fa65d20-2ed1-4ce1-a77a-95f559980b54 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Neutron deleted interface d5dbefd9-695f-4a85-859d-9c9d8cb6632b; detaching it from the instance and deleting it from the info cache [ 851.418745] env[62204]: DEBUG nova.network.neutron [req-de23faa7-bcac-49be-bd90-ff1061c315bb req-0fa65d20-2ed1-4ce1-a77a-95f559980b54 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Updating instance_info_cache with network_info: [{"id": "0c5b107a-29a0-425e-81cd-4dae8593ec01", "address": "fa:16:3e:20:82:80", "network": {"id": "f5131c66-1079-42b3-af97-2dd5d171b3ba", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-37756722", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.95", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "fc9c47a4209c4f158e39dd04afd17fa5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "82ca17df-257e-40e6-9ec9-310ed6f05ccb", "external-id": "nsx-vlan-transportzone-778", "segmentation_id": 778, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0c5b107a-29", "ovs_interfaceid": "0c5b107a-29a0-425e-81cd-4dae8593ec01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.554714] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199887, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.654517] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199886, 'name': ReconfigVM_Task, 'duration_secs': 0.698758} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.654830] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 031cb3ff-4a80-4961-a399-de31fc72e65b/031cb3ff-4a80-4961-a399-de31fc72e65b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 851.656369] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a6dd83b-653f-48e2-9942-f3f2cc6a9d16 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.663835] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 851.663835] env[62204]: value = "task-1199889" [ 851.663835] env[62204]: _type = "Task" [ 851.663835] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.675665] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199889, 'name': Rename_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.679069] env[62204]: DEBUG nova.compute.manager [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 851.700820] env[62204]: DEBUG oslo_vmware.api [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1199888, 'name': PowerOnVM_Task, 'duration_secs': 0.48897} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.700820] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.700820] env[62204]: DEBUG nova.compute.manager [None req-d8578b7d-c2ee-4d54-a475-8ecd22e50e69 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 851.704499] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8de12f5-875a-42bf-aa66-5403268f3b80 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.719193] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 851.719560] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 851.719767] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.720050] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 851.720229] env[62204]: DEBUG nova.virt.hardware [None 
req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.720422] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 851.720674] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 851.720904] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 851.721249] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 851.721513] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 851.721737] env[62204]: DEBUG nova.virt.hardware [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.722676] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5f90b2-6cdb-44bd-b29c-d718c4451122 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.733100] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8b1c77-b90b-48cb-ae33-1a60c2a1fd54 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.752025] env[62204]: DEBUG nova.scheduler.client.report [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 851.862082] env[62204]: DEBUG nova.compute.manager [req-cc0b5c3d-f0fa-4216-9273-a6922aa84d27 req-45c12d66-d101-41ee-8195-f2beb7b1092d service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Received event network-vif-deleted-a4e77bf2-081b-4fb9-9878-8c990c48fe46 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.862082] env[62204]: INFO nova.compute.manager [req-cc0b5c3d-f0fa-4216-9273-a6922aa84d27 req-45c12d66-d101-41ee-8195-f2beb7b1092d service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Neutron deleted interface a4e77bf2-081b-4fb9-9878-8c990c48fe46; detaching it from the instance and deleting it from the info cache [ 851.862301] env[62204]: DEBUG nova.network.neutron [req-cc0b5c3d-f0fa-4216-9273-a6922aa84d27 req-45c12d66-d101-41ee-8195-f2beb7b1092d service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.911503] env[62204]: INFO nova.compute.manager [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Took 41.23 seconds to build instance. [ 851.923593] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bdbe81e3-a503-483d-8d3b-0c2a85b16240 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.936748] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c872fa3-01c1-49a3-9fbf-131a3e5c5b59 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.973868] env[62204]: DEBUG nova.network.neutron [-] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.975405] env[62204]: DEBUG nova.compute.manager [req-de23faa7-bcac-49be-bd90-ff1061c315bb req-0fa65d20-2ed1-4ce1-a77a-95f559980b54 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Detach interface failed, port_id=d5dbefd9-695f-4a85-859d-9c9d8cb6632b, reason: Instance a2a37a1b-3ef0-4be7-924c-66c7a1583b68 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 852.054660] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199887, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727291} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.054972] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 98805916-8501-4afb-9e1c-a5393f6e5557/98805916-8501-4afb-9e1c-a5393f6e5557.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.055206] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.055627] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e124e377-9806-40dc-96cc-5666bd626ed3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.062465] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 852.062465] env[62204]: value = "task-1199890" [ 852.062465] env[62204]: _type = "Task" [ 852.062465] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.069877] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199890, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.169277] env[62204]: DEBUG nova.network.neutron [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Successfully updated port: 13d59ea2-8671-4e65-a3f0-5839b8e92325 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.176705] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199889, 'name': Rename_Task, 'duration_secs': 0.320159} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.176705] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.176956] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-012ebd1a-6fcd-448f-a0cf-18cf38f6350b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.185692] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 852.185692] env[62204]: value = "task-1199891" [ 852.185692] env[62204]: _type = "Task" [ 852.185692] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.196051] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199891, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.260462] env[62204]: DEBUG oslo_concurrency.lockutils [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.616s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.263169] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.240s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.263458] env[62204]: DEBUG nova.objects.instance [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2c393123-87de-460a-965d-43473478a79f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62204) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 852.365233] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8672653e-19b6-4745-b198-51ccae920d89 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.377323] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4208dbf5-1255-4cad-9513-c31d845471cf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.389094] env[62204]: DEBUG nova.network.neutron [-] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Updating instance_info_cache with network_info: [] {{(pid=62204) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.413519] env[62204]: DEBUG oslo_concurrency.lockutils [None req-df3e7574-5101-47c4-b611-bdbe23614cb7 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.683s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.414252] env[62204]: DEBUG nova.compute.manager [req-cc0b5c3d-f0fa-4216-9273-a6922aa84d27 req-45c12d66-d101-41ee-8195-f2beb7b1092d service nova] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Detach interface failed, port_id=a4e77bf2-081b-4fb9-9878-8c990c48fe46, reason: Instance 1a1cb81f-383e-48de-8c11-3d5e2c801f40 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 852.461129] env[62204]: DEBUG nova.network.neutron [-] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.476479] env[62204]: INFO nova.compute.manager [-] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Took 1.31 seconds to deallocate network for instance. [ 852.573122] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065858} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.573380] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 852.574214] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf696d0c-858d-4568-bb22-d8ed0635b3dd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.597956] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 98805916-8501-4afb-9e1c-a5393f6e5557/98805916-8501-4afb-9e1c-a5393f6e5557.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.598326] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84556280-d3f9-4d5f-8174-4d233fbc5546 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.619884] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 852.619884] env[62204]: value = "task-1199892" [ 852.619884] env[62204]: _type = "Task" [ 852.619884] env[62204]: } to 
complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.629320] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199892, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.672031] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "refresh_cache-4dc4546f-85e6-4259-9ccd-a7396669eace" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.672031] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "refresh_cache-4dc4546f-85e6-4259-9ccd-a7396669eace" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.672031] env[62204]: DEBUG nova.network.neutron [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 852.697105] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199891, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.777018] env[62204]: DEBUG oslo_concurrency.lockutils [None req-71e9e33e-2931-4e72-beda-4b885a42b69f tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 56.630s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.778360] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 29.866s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.778685] env[62204]: INFO nova.compute.manager [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Unshelving [ 852.891555] env[62204]: INFO nova.compute.manager [-] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Took 2.25 seconds to deallocate network for instance. 
[ 852.916508] env[62204]: DEBUG nova.compute.manager [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 852.964031] env[62204]: INFO nova.compute.manager [-] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Took 1.65 seconds to deallocate network for instance. [ 852.983163] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.133250] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199892, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.198774] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199891, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.210640] env[62204]: DEBUG nova.network.neutron [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.277094] env[62204]: DEBUG oslo_concurrency.lockutils [None req-34539394-80ee-4b17-8400-ce98f4b1872a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.278334] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.639s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.278581] env[62204]: DEBUG nova.objects.instance [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'resources' on Instance uuid a71fd192-f3b6-4f0f-900d-887d15f44d7a {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.382445] env[62204]: DEBUG nova.network.neutron [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Updating instance_info_cache with network_info: [{"id": "13d59ea2-8671-4e65-a3f0-5839b8e92325", "address": "fa:16:3e:af:18:3c", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13d59ea2-86", "ovs_interfaceid": "13d59ea2-8671-4e65-a3f0-5839b8e92325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.399372] env[62204]: DEBUG oslo_concurrency.lockutils [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.441591] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.471573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.620824] env[62204]: DEBUG nova.compute.manager [req-3c1bda91-8f6c-4ece-8f8f-0f3b53e26dda req-9aca1fbe-f436-491a-bfb0-1f8d79179b55 service nova] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Received event network-vif-deleted-0c5b107a-29a0-425e-81cd-4dae8593ec01 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.621264] env[62204]: DEBUG nova.compute.manager [req-3c1bda91-8f6c-4ece-8f8f-0f3b53e26dda req-9aca1fbe-f436-491a-bfb0-1f8d79179b55 service nova] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Received event network-vif-deleted-c5dff251-4a4b-46c8-908a-22925c09890c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.631743] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199892, 'name': ReconfigVM_Task, 'duration_secs': 0.653886} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.632330] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 98805916-8501-4afb-9e1c-a5393f6e5557/98805916-8501-4afb-9e1c-a5393f6e5557.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.632760] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8223e96-36a0-49d3-8196-fa837839aee3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.640821] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 853.640821] env[62204]: value = "task-1199893" [ 853.640821] env[62204]: _type = "Task" [ 853.640821] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.653898] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199893, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.698077] env[62204]: DEBUG oslo_vmware.api [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199891, 'name': PowerOnVM_Task, 'duration_secs': 1.10512} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.698505] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 853.698638] env[62204]: INFO nova.compute.manager [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Took 10.55 seconds to spawn the instance on the hypervisor. [ 853.698825] env[62204]: DEBUG nova.compute.manager [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 853.699614] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5968c7d4-a8ee-42b3-9b21-b3403eac41d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.791935] env[62204]: DEBUG nova.compute.utils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 853.887167] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "refresh_cache-4dc4546f-85e6-4259-9ccd-a7396669eace" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.887596] env[62204]: DEBUG nova.compute.manager [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Instance network_info: |[{"id": "13d59ea2-8671-4e65-a3f0-5839b8e92325", "address": "fa:16:3e:af:18:3c", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13d59ea2-86", "ovs_interfaceid": "13d59ea2-8671-4e65-a3f0-5839b8e92325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 853.890282] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:18:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13d59ea2-8671-4e65-a3f0-5839b8e92325', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.899256] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Creating folder: Project (6e70013d6da84d2b9a0719621c9f2c1a). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.900638] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b5f48d2-9ba9-4851-ae9e-6671c59a734f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.917569] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Created folder: Project (6e70013d6da84d2b9a0719621c9f2c1a) in parent group-v259933. [ 853.918608] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Creating folder: Instances. Parent ref: group-v260064. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.918608] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d4f960d-549b-416d-b1bd-7eb0959c945d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.928974] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Created folder: Instances in parent group-v260064. [ 853.929372] env[62204]: DEBUG oslo.service.loopingcall [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.929691] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.930281] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f6a3840-1ac3-4ab0-900a-8531fa8321c8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.957531] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.957531] env[62204]: value = "task-1199896" [ 853.957531] env[62204]: _type = "Task" [ 853.957531] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.974280] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199896, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.002359] env[62204]: DEBUG nova.compute.manager [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Received event network-vif-plugged-13d59ea2-8671-4e65-a3f0-5839b8e92325 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.002580] env[62204]: DEBUG oslo_concurrency.lockutils [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] Acquiring lock "4dc4546f-85e6-4259-9ccd-a7396669eace-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.003211] env[62204]: DEBUG oslo_concurrency.lockutils [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.003211] env[62204]: DEBUG oslo_concurrency.lockutils [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.003211] env[62204]: DEBUG nova.compute.manager [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] No waiting events found dispatching network-vif-plugged-13d59ea2-8671-4e65-a3f0-5839b8e92325 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 854.003423] env[62204]: WARNING nova.compute.manager [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Received unexpected event network-vif-plugged-13d59ea2-8671-4e65-a3f0-5839b8e92325 for instance with vm_state building and task_state spawning. 
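The external_instance_event entries just above show Nova receiving network-vif-plugged-13d59ea2-8671-4e65-a3f0-5839b8e92325 while instance 4dc4546f-85e6-4259-9ccd-a7396669eace is still building, hence the "unexpected event" warning. As a rough sketch of how such an event reaches Nova (normally Neutron sends it once the VIF is wired up), the request below posts to the os-server-external-events API; the endpoint and token are placeholder values, not taken from this deployment.

    # Sketch only: deliver a network-vif-plugged event to Nova the way
    # Neutron does. NOVA_ENDPOINT and TOKEN are assumed placeholder values.
    import requests

    NOVA_ENDPOINT = "http://controller:8774/v2.1"   # assumed Nova API endpoint
    TOKEN = "<keystone-token>"                      # assumed auth token
    SERVER_UUID = "4dc4546f-85e6-4259-9ccd-a7396669eace"
    PORT_ID = "13d59ea2-8671-4e65-a3f0-5839b8e92325"

    payload = {"events": [{
        "name": "network-vif-plugged",
        "server_uuid": SERVER_UUID,
        "tag": PORT_ID,          # the port UUID echoed back in the log lines
        "status": "completed",
    }]}

    resp = requests.post(f"{NOVA_ENDPOINT}/os-server-external-events",
                         json=payload,
                         headers={"X-Auth-Token": TOKEN})
    print(resp.status_code)

When no task is waiting on the event (as here, because the spawn has not registered a waiter yet), Nova logs "No waiting events found" plus the warning above, but still refreshes the port's network info cache, which is what the network-changed handling that follows does.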
[ 854.003423] env[62204]: DEBUG nova.compute.manager [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Received event network-changed-13d59ea2-8671-4e65-a3f0-5839b8e92325 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 854.003617] env[62204]: DEBUG nova.compute.manager [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Refreshing instance network info cache due to event network-changed-13d59ea2-8671-4e65-a3f0-5839b8e92325. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 854.003819] env[62204]: DEBUG oslo_concurrency.lockutils [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] Acquiring lock "refresh_cache-4dc4546f-85e6-4259-9ccd-a7396669eace" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.003961] env[62204]: DEBUG oslo_concurrency.lockutils [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] Acquired lock "refresh_cache-4dc4546f-85e6-4259-9ccd-a7396669eace" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.004144] env[62204]: DEBUG nova.network.neutron [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Refreshing network info cache for port 13d59ea2-8671-4e65-a3f0-5839b8e92325 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 854.074178] env[62204]: DEBUG nova.compute.manager [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 854.075079] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99166574-2132-456c-b15f-a7261fe1312b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.157814] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199893, 'name': Rename_Task, 'duration_secs': 0.182271} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.158147] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.158781] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65dcb883-e418-4c58-810c-09bad96ebd87 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.172518] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 854.172518] env[62204]: value = "task-1199897" [ 854.172518] env[62204]: _type = "Task" [ 854.172518] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.184873] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199897, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.225055] env[62204]: INFO nova.compute.manager [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Took 41.54 seconds to build instance. 
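The Rename_Task and PowerOnVM_Task entries above follow the same pattern: oslo.vmware invokes a vSphere task and then wait_for_task polls it, producing the "progress is N%" lines until "completed successfully". A minimal sketch of that pattern, with placeholder vCenter credentials and a hypothetical managed-object id:

    # Sketch only: the invoke-then-poll pattern behind the task log lines.
    # Host, credentials and the vm moref below are assumed placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        "vcenter.example.org",            # assumed vCenter host
        "administrator@vsphere.local",    # assumed user
        "secret",                         # assumed password
        api_retry_count=10,
        task_poll_interval=0.5,           # interval between progress polls
    )

    vm_ref = vim_util.get_moref("vm-12345", "VirtualMachine")  # hypothetical moref
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    session.wait_for_task(task)           # blocks until the task completes or faults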
[ 854.265133] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df393b1f-0d99-462e-af94-d2cd428bd0c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.277658] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14868d3-d76e-4b3b-89ac-c1cc4fb10df9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.308098] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 854.309301] env[62204]: INFO nova.virt.block_device [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Booting with volume cd1bf708-e52b-48aa-ab88-cfd254e6c272 at /dev/sdb [ 854.311695] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 854.313411] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89238d96-5e23-471f-8c78-8d628a52ffcb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.322755] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1cc42d-6186-4819-bdf5-5a2feebbbe9f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.341114] env[62204]: DEBUG nova.compute.provider_tree [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.365031] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d9c620a-04f1-474a-ae3b-9073464c7b25 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.374147] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a33bfd6-abc5-48f1-8c23-3579680f0532 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.408111] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2cce7e6-d0e7-4d29-8a49-66d5211a8a00 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.418641] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17256ae0-7ee6-4eea-a2e4-1c00bb8779ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.457047] env[62204]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b7fbd9c-78fc-4607-9785-04838422cee6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.479597] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466cee99-0f4c-4b9a-8ddd-59f0490452c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.482622] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199896, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.496451] env[62204]: DEBUG nova.virt.block_device [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating existing volume attachment record: ae549a33-54e4-46fb-b34a-cadd17915d2b {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 854.591596] env[62204]: INFO nova.compute.manager [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] instance snapshotting [ 854.599077] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1b6095-7977-44f6-8f5b-14edd86cfb65 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.623650] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb93b19-2de4-405b-bda8-6482953543c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.684804] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199897, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.727594] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e8c479ea-7452-43f7-8837-e3feff2ddc30 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "031cb3ff-4a80-4961-a399-de31fc72e65b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.510s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.747880] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.748348] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.748525] env[62204]: DEBUG nova.compute.manager [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 854.749774] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474e6b7a-2dfc-40e6-93aa-5b4f52e33337 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.757118] env[62204]: DEBUG nova.compute.manager [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62204) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 854.757791] env[62204]: DEBUG nova.objects.instance [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'flavor' on Instance uuid 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 854.761163] env[62204]: DEBUG nova.network.neutron [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Updated VIF entry in instance network info cache for port 13d59ea2-8671-4e65-a3f0-5839b8e92325. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 854.761556] env[62204]: DEBUG nova.network.neutron [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Updating instance_info_cache with network_info: [{"id": "13d59ea2-8671-4e65-a3f0-5839b8e92325", "address": "fa:16:3e:af:18:3c", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13d59ea2-86", "ovs_interfaceid": "13d59ea2-8671-4e65-a3f0-5839b8e92325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.822683] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 854.822924] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Starting heal instance info cache {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 854.844871] env[62204]: DEBUG nova.scheduler.client.report [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 854.979043] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199896, 'name': CreateVM_Task, 'duration_secs': 0.600259} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.979043] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.979043] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.979527] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.980090] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 854.980515] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca2ada66-bb8b-45be-b7d1-ccbf558e08e9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.988351] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 854.988351] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520d872a-7486-d9ac-1923-eced97743406" [ 854.988351] env[62204]: _type = "Task" [ 854.988351] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.000570] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520d872a-7486-d9ac-1923-eced97743406, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.134239] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 855.134670] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-72410d71-0daf-4054-9148-0d4861017f20 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.144557] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 855.144557] env[62204]: value = "task-1199901" [ 855.144557] env[62204]: _type = "Task" [ 855.144557] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.155885] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199901, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.184290] env[62204]: DEBUG oslo_vmware.api [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1199897, 'name': PowerOnVM_Task, 'duration_secs': 0.96624} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.184615] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.184919] env[62204]: INFO nova.compute.manager [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Took 8.18 seconds to spawn the instance on the hypervisor. 
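The "Acquiring lock" / "acquired by" / '"released" by' lines throughout this section come from oslo.concurrency's lockutils, which Nova uses for named in-process locks such as "compute_resources" and the datastore image-cache lock just above. A small sketch of that usage; the function names and UUIDs are illustrative only:

    # Sketch only: named locks with oslo.concurrency, matching the
    # acquire/release log lines. Function bodies are placeholders.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_usage(instance_uuid):
        # Runs with "compute_resources" held; other callers block, and the
        # log records how long they waited.
        print("updating usage for", instance_uuid)

    def claim(instance_uuid):
        # Equivalent context-manager form.
        with lockutils.lock("compute_resources"):
            print("claiming resources for", instance_uuid)

    update_usage("a71fd192-f3b6-4f0f-900d-887d15f44d7a")
    claim("0a720922-60ea-4b31-ba56-cdcbba1ab629")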
[ 855.185212] env[62204]: DEBUG nova.compute.manager [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 855.186127] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8877472d-4cf2-434c-9640-c2e7e98a2919 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.266097] env[62204]: DEBUG oslo_concurrency.lockutils [req-69d16d57-b73c-4dc9-8358-768b92298ce0 req-305e41ca-5d1f-4e8c-a20b-16f7bdda9937 service nova] Releasing lock "refresh_cache-4dc4546f-85e6-4259-9ccd-a7396669eace" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.267620] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.267899] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e805187e-84d3-46fd-8166-4c8fc359bb65 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.275480] env[62204]: DEBUG oslo_vmware.api [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 855.275480] env[62204]: value = "task-1199902" [ 855.275480] env[62204]: _type = "Task" [ 855.275480] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.287260] env[62204]: DEBUG oslo_vmware.api [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199902, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.352018] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.074s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.355998] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.112s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.357240] env[62204]: INFO nova.compute.claims [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.375724] env[62204]: INFO nova.scheduler.client.report [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleted allocations for instance a71fd192-f3b6-4f0f-900d-887d15f44d7a [ 855.428227] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "031cb3ff-4a80-4961-a399-de31fc72e65b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.428552] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "031cb3ff-4a80-4961-a399-de31fc72e65b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.428832] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "031cb3ff-4a80-4961-a399-de31fc72e65b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.429120] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "031cb3ff-4a80-4961-a399-de31fc72e65b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.429373] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 
tempest-ServersNegativeTestJSON-585823771-project-member] Lock "031cb3ff-4a80-4961-a399-de31fc72e65b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.431932] env[62204]: INFO nova.compute.manager [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Terminating instance [ 855.434460] env[62204]: DEBUG nova.compute.manager [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 855.434633] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.435978] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ce727b-c20d-4327-b4ef-3a7692630ac3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.447089] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.447486] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c25c4f72-5c33-4287-803e-43e543447bb3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.457341] env[62204]: DEBUG oslo_vmware.api [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 855.457341] env[62204]: value = "task-1199903" [ 855.457341] env[62204]: _type = "Task" [ 855.457341] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.471971] env[62204]: DEBUG oslo_vmware.api [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199903, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.510597] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520d872a-7486-d9ac-1923-eced97743406, 'name': SearchDatastore_Task, 'duration_secs': 0.017064} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.510597] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.510597] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.510597] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.510597] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.510597] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.510597] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10f332bb-7e2f-45f2-b240-c3edb91c21e8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.520765] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.521081] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.522182] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-631ec15f-9ddf-49f5-9308-c345fed55edf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.531536] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 855.531536] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529b076d-4139-01c7-8893-8cf795283fe9" [ 855.531536] env[62204]: _type = "Task" [ 855.531536] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.541191] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529b076d-4139-01c7-8893-8cf795283fe9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.545489] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.545735] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.654329] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199901, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.705112] env[62204]: INFO nova.compute.manager [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Took 39.09 seconds to build instance. [ 855.785948] env[62204]: DEBUG oslo_vmware.api [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199902, 'name': PowerOffVM_Task, 'duration_secs': 0.255258} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.786293] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.786480] env[62204]: DEBUG nova.compute.manager [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 855.787283] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-173304e6-cb3b-4792-970d-dab54b6b081f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.885689] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48fd8ce7-255c-4769-9230-dd1cc8b40fbd tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.044s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.886796] env[62204]: DEBUG oslo_concurrency.lockutils [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Acquired lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.887796] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33d8ba0-66a0-46fd-b437-4a74c25f6a75 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.897988] env[62204]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
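The suds warning above introduces the fault dump that follows: vCenter returns ManagedObjectNotFound for vm-259983 because the VM backing instance a71fd192-f3b6-4f0f-900d-887d15f44d7a has already been destroyed, and Nova drops the original exception in favour of InstanceNotFound. A rough sketch of that translation, using a hypothetical helper name:

    # Sketch only: translating a vCenter ManagedObjectNotFound fault into
    # Nova's InstanceNotFound, as the ERROR entry below describes.
    # get_vm_power_state is a hypothetical helper, not Nova's actual method.
    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util
    from nova import exception

    def get_vm_power_state(session, vm_ref, instance_uuid):
        try:
            return session.invoke_api(vim_util, "get_object_property",
                                      session.vim, vm_ref, "runtime.powerState")
        except vexc.ManagedObjectNotFoundException:
            # The moref (e.g. vm-259983) no longer exists on the vCenter side.
            raise exception.InstanceNotFound(instance_id=instance_uuid)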
[ 855.898259] env[62204]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=62204) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 855.898886] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a70b2e1b-3247-44d0-b6ac-c45bcaf6759b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.910620] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a16865-a196-47c2-9f9f-1119f27c3ab6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.947927] env[62204]: ERROR root [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-259983' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-259983' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-259983' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-259983'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-259983' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-259983' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-259983'}\n"]: nova.exception.InstanceNotFound: Instance a71fd192-f3b6-4f0f-900d-887d15f44d7a could not be found. [ 855.948253] env[62204]: DEBUG oslo_concurrency.lockutils [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Releasing lock "a71fd192-f3b6-4f0f-900d-887d15f44d7a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.948343] env[62204]: DEBUG nova.compute.manager [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Detach interface failed, port_id=33e67759-b1fb-4395-9ed1-bf2102c8d3ee, reason: Instance a71fd192-f3b6-4f0f-900d-887d15f44d7a could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 855.948537] env[62204]: DEBUG nova.compute.manager [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Received event network-vif-plugged-5af1ae4e-3a58-4d76-854a-59ac01168a4c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.948729] env[62204]: DEBUG oslo_concurrency.lockutils [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Acquiring lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.949014] env[62204]: DEBUG oslo_concurrency.lockutils [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.949184] env[62204]: DEBUG oslo_concurrency.lockutils [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.949282] env[62204]: DEBUG nova.compute.manager [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] No waiting events found dispatching network-vif-plugged-5af1ae4e-3a58-4d76-854a-59ac01168a4c {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 855.949451] env[62204]: WARNING nova.compute.manager [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Received unexpected event network-vif-plugged-5af1ae4e-3a58-4d76-854a-59ac01168a4c for instance with vm_state building and task_state spawning. [ 855.949616] env[62204]: DEBUG nova.compute.manager [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Received event network-changed-5af1ae4e-3a58-4d76-854a-59ac01168a4c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 855.949771] env[62204]: DEBUG nova.compute.manager [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Refreshing instance network info cache due to event network-changed-5af1ae4e-3a58-4d76-854a-59ac01168a4c. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 855.949960] env[62204]: DEBUG oslo_concurrency.lockutils [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Acquiring lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.950111] env[62204]: DEBUG oslo_concurrency.lockutils [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Acquired lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.950274] env[62204]: DEBUG nova.network.neutron [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Refreshing network info cache for port 5af1ae4e-3a58-4d76-854a-59ac01168a4c {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 855.969571] env[62204]: DEBUG oslo_vmware.api [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199903, 'name': PowerOffVM_Task, 'duration_secs': 0.167623} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.969866] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.970064] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.970559] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18647971-4488-41b0-81e9-08d63aba8a84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.042868] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529b076d-4139-01c7-8893-8cf795283fe9, 'name': SearchDatastore_Task, 'duration_secs': 0.010619} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.044149] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e98eed6-4a7d-4b17-8579-2ae86a024e7f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.048047] env[62204]: DEBUG nova.compute.manager [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 856.053314] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 856.053314] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521aa09d-a10e-e812-cc53-1e0f192445ea" [ 856.053314] env[62204]: _type = "Task" [ 856.053314] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.053714] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 856.055033] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 856.055033] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleting the datastore file [datastore1] 031cb3ff-4a80-4961-a399-de31fc72e65b {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 856.055604] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf230cfc-7708-42d3-b4da-2bf2afe4f894 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.068796] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521aa09d-a10e-e812-cc53-1e0f192445ea, 'name': SearchDatastore_Task, 'duration_secs': 0.012848} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.070270] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.071322] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 4dc4546f-85e6-4259-9ccd-a7396669eace/4dc4546f-85e6-4259-9ccd-a7396669eace.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.071322] env[62204]: DEBUG oslo_vmware.api [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 856.071322] env[62204]: value = "task-1199905" [ 856.071322] env[62204]: _type = "Task" [ 856.071322] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.071322] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbc58a4e-4ee1-477c-a33b-7350c85328ee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.083402] env[62204]: DEBUG oslo_vmware.api [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199905, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.085078] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 856.085078] env[62204]: value = "task-1199906" [ 856.085078] env[62204]: _type = "Task" [ 856.085078] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.095520] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199906, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.158648] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199901, 'name': CreateSnapshot_Task, 'duration_secs': 0.896515} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.158940] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 856.159755] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ecd07b-0b5f-4c8a-8f62-3ec28d5023b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.207254] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cff76707-4033-4dd9-8ecd-d2512b9f7a47 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "98805916-8501-4afb-9e1c-a5393f6e5557" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.543s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.303404] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d3cd4f05-b6c8-400d-889f-eb236a82fdc0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.555s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.333853] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.334071] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquired lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.334237] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Forcefully refreshing network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 856.584242] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.600190] env[62204]: DEBUG oslo_vmware.api [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1199905, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176535} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.601530] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.601530] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.601530] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.601790] env[62204]: INFO nova.compute.manager [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 856.601861] env[62204]: DEBUG oslo.service.loopingcall [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.602072] env[62204]: DEBUG nova.compute.manager [-] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 856.602162] env[62204]: DEBUG nova.network.neutron [-] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 856.607597] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199906, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.680120] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 856.683536] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ec4c4056-48b0-4f41-952c-7dafbc6dfc8d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.693411] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 856.693411] env[62204]: value = "task-1199907" [ 856.693411] env[62204]: _type = "Task" [ 856.693411] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.707590] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199907, 'name': CloneVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.763013] env[62204]: DEBUG nova.network.neutron [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Updated VIF entry in instance network info cache for port 5af1ae4e-3a58-4d76-854a-59ac01168a4c. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 856.763707] env[62204]: DEBUG nova.network.neutron [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Updating instance_info_cache with network_info: [{"id": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "address": "fa:16:3e:32:08:82", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5af1ae4e-3a", "ovs_interfaceid": "5af1ae4e-3a58-4d76-854a-59ac01168a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.846026] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02413b90-80e8-4557-b904-ed123823088f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.856200] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa95d382-b6c2-49e1-9c06-42013f51b181 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.901182] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09801e47-c2a5-4393-acda-e6db1a74aed7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.911100] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0bfd71-6219-4241-8f37-55d332832479 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.931351] env[62204]: DEBUG nova.compute.provider_tree [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.098419] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199906, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585611} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.098732] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 4dc4546f-85e6-4259-9ccd-a7396669eace/4dc4546f-85e6-4259-9ccd-a7396669eace.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.099140] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.099408] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86b3eca6-864d-4b32-be07-cd9a803f2009 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.107343] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 857.107343] env[62204]: value = "task-1199909" [ 857.107343] env[62204]: _type = "Task" [ 857.107343] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.119994] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199909, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.205104] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199907, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.266727] env[62204]: DEBUG oslo_concurrency.lockutils [req-129b0edb-5d42-4ad7-b485-93e30b5e0807 req-e1a62024-3e79-4e50-8183-6824b9db793a service nova] Releasing lock "refresh_cache-f5f0c15f-ae0d-4615-93ab-3203a5d7e090" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.327694] env[62204]: DEBUG nova.compute.manager [req-006c8cba-0e13-4695-b494-a1990be393fc req-63014b73-fbcc-4f11-9fee-f7279bee0636 service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Received event network-vif-deleted-fd58bbd0-f78f-48e6-9f55-445d86153c6e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 857.327977] env[62204]: INFO nova.compute.manager [req-006c8cba-0e13-4695-b494-a1990be393fc req-63014b73-fbcc-4f11-9fee-f7279bee0636 service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Neutron deleted interface fd58bbd0-f78f-48e6-9f55-445d86153c6e; detaching it from the instance and deleting it from the info cache [ 857.328083] env[62204]: DEBUG nova.network.neutron [req-006c8cba-0e13-4695-b494-a1990be393fc req-63014b73-fbcc-4f11-9fee-f7279bee0636 service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.435339] env[62204]: DEBUG nova.scheduler.client.report [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 857.533671] env[62204]: DEBUG nova.network.neutron [-] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.618943] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199909, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076302} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.619773] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 857.620618] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f899b1fd-28d7-4d09-89e9-203638966e20 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.644778] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 4dc4546f-85e6-4259-9ccd-a7396669eace/4dc4546f-85e6-4259-9ccd-a7396669eace.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.645145] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca79adff-486d-42f7-9bbf-fd9e7267d437 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.666062] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 857.666062] env[62204]: value = "task-1199910" [ 857.666062] env[62204]: _type = "Task" [ 857.666062] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.678354] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199910, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.707344] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199907, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.744888] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [{"id": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "address": "fa:16:3e:b6:54:e6", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf86a68-c5", "ovs_interfaceid": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.830598] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96b3b7e3-2c5d-4acc-a8e7-a9ccec5dd68b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.842973] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd910abd-126f-451b-b936-4ca11216330e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.876997] env[62204]: DEBUG nova.compute.manager [req-006c8cba-0e13-4695-b494-a1990be393fc req-63014b73-fbcc-4f11-9fee-f7279bee0636 service nova] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Detach interface failed, port_id=fd58bbd0-f78f-48e6-9f55-445d86153c6e, reason: Instance 031cb3ff-4a80-4961-a399-de31fc72e65b could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 857.932021] env[62204]: DEBUG nova.objects.instance [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'flavor' on Instance uuid 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.941316] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.941903] env[62204]: DEBUG nova.compute.manager [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 857.945278] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.785s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.945703] env[62204]: DEBUG nova.objects.instance [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lazy-loading 'resources' on Instance uuid 60eaec9c-5dcc-4e2f-9649-78acba318a6b {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.036026] env[62204]: INFO nova.compute.manager [-] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Took 1.43 seconds to deallocate network for instance. [ 858.178639] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199910, 'name': ReconfigVM_Task, 'duration_secs': 0.367949} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.179115] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 4dc4546f-85e6-4259-9ccd-a7396669eace/4dc4546f-85e6-4259-9ccd-a7396669eace.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.179654] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1d1b77dc-7dc1-449b-83ea-2842b0d4a76f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.188367] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 858.188367] env[62204]: value = "task-1199911" [ 858.188367] env[62204]: _type = "Task" [ 858.188367] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.198720] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199911, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.207704] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199907, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.248356] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Releasing lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.248674] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updated the network info_cache for instance {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 858.248816] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.248965] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.249137] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.249319] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.249481] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.249631] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.249763] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62204) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 858.250226] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 858.440180] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.440599] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.440647] env[62204]: DEBUG nova.network.neutron [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 858.440868] env[62204]: DEBUG nova.objects.instance [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'info_cache' on Instance uuid 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.448665] env[62204]: DEBUG nova.compute.utils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 858.456909] env[62204]: DEBUG nova.compute.manager [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Not allocating networking since 'none' was specified. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 858.543655] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.702046] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199911, 'name': Rename_Task, 'duration_secs': 0.157068} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.705440] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 858.705725] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94fb6ef4-36c5-4ca9-a313-ac3b68933e5e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.718408] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199907, 'name': CloneVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.718993] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 858.718993] env[62204]: value = "task-1199912" [ 858.718993] env[62204]: _type = "Task" [ 858.718993] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.727801] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199912, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.756663] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.757033] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.757243] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.898576] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bf3019-a020-4018-a64e-04abf3ae1b48 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.909178] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b3b3d4-f213-4051-9a2b-332f6d14f2e3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.943343] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa760ce-8026-4ccf-909b-6ea23a6adb18 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.947869] env[62204]: DEBUG nova.objects.base [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Object Instance<57e14d47-1d3f-4fed-93c1-11cfc17dc9bc> lazy-loaded attributes: flavor,info_cache {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 858.963298] env[62204]: DEBUG nova.compute.manager [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 858.967676] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0307edbb-3dbe-4608-8fbf-35a8425c8b16 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.985540] env[62204]: DEBUG nova.compute.provider_tree [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.211527] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199907, 'name': CloneVM_Task, 'duration_secs': 2.132155} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.211813] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Created linked-clone VM from snapshot [ 859.212591] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf0e497-c76d-438f-8287-480d58a4eec5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.223735] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Uploading image 756c1996-a6d4-4dc1-b787-af7e2b1a2b97 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 859.234739] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199912, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.246064] env[62204]: DEBUG oslo_vmware.rw_handles [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 859.246064] env[62204]: value = "vm-260070" [ 859.246064] env[62204]: _type = "VirtualMachine" [ 859.246064] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 859.246525] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b26d4bdf-cdce-4441-b91c-dc608f0c415c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.255184] env[62204]: DEBUG oslo_vmware.rw_handles [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lease: (returnval){ [ 859.255184] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52484680-eae8-0062-6391-1fe0ec69c617" [ 859.255184] env[62204]: _type = "HttpNfcLease" [ 859.255184] env[62204]: } obtained for exporting VM: (result){ [ 859.255184] env[62204]: value = "vm-260070" [ 859.255184] env[62204]: _type = "VirtualMachine" [ 859.255184] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 859.255613] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the lease: (returnval){ [ 859.255613] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52484680-eae8-0062-6391-1fe0ec69c617" [ 859.255613] env[62204]: _type = "HttpNfcLease" [ 859.255613] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 859.259411] env[62204]: DEBUG nova.compute.manager [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 859.267009] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.267009] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52484680-eae8-0062-6391-1fe0ec69c617" [ 859.267009] env[62204]: _type = "HttpNfcLease" [ 859.267009] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 859.488847] env[62204]: DEBUG nova.scheduler.client.report [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 859.530165] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.530165] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.734487] env[62204]: DEBUG oslo_vmware.api [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1199912, 'name': PowerOnVM_Task, 'duration_secs': 0.545437} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.734803] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.734938] env[62204]: INFO nova.compute.manager [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Took 8.06 seconds to spawn the instance on the hypervisor. 
[ 859.735218] env[62204]: DEBUG nova.compute.manager [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 859.736039] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b5ee1f-8df3-43c6-b402-dcc82fcba74b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.763255] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.763255] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52484680-eae8-0062-6391-1fe0ec69c617" [ 859.763255] env[62204]: _type = "HttpNfcLease" [ 859.763255] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 859.765798] env[62204]: DEBUG oslo_vmware.rw_handles [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 859.765798] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52484680-eae8-0062-6391-1fe0ec69c617" [ 859.765798] env[62204]: _type = "HttpNfcLease" [ 859.765798] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 859.766572] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c565d62-4f04-4c11-8c2f-f382b10704d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.776811] env[62204]: DEBUG oslo_vmware.rw_handles [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52081a61-083a-4696-3690-5c32ee6369bc/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 859.777051] env[62204]: DEBUG oslo_vmware.rw_handles [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52081a61-083a-4696-3690-5c32ee6369bc/disk-0.vmdk for reading. 
{{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 859.837375] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.870599] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3b49b4a0-8807-47fe-83a4-fe0db49eca00 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.980305] env[62204]: DEBUG nova.compute.manager [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 859.994097] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.049s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.996718] env[62204]: DEBUG oslo_concurrency.lockutils [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.572s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.004190] env[62204]: DEBUG nova.network.neutron [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [{"id": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "address": "fa:16:3e:d5:25:7f", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e81e820-35", "ovs_interfaceid": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.017584] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 860.017832] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 860.018029] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 860.018233] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 860.018385] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 860.018553] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 860.018785] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 860.018965] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 860.019325] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 
tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 860.019422] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 860.019614] env[62204]: DEBUG nova.virt.hardware [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 860.020601] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40324c8-08ff-4aee-86ab-38a3d803db85 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.030785] env[62204]: INFO nova.scheduler.client.report [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Deleted allocations for instance 60eaec9c-5dcc-4e2f-9649-78acba318a6b [ 860.035659] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b56eefa-6d3e-441d-93c3-407fd546b181 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.055744] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 860.061667] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Creating folder: Project (a14848aa059c42fb85167a354d3bbeb4). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.066468] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b320c0d-3c5c-47e4-80f0-e7ec82850005 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.078913] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Created folder: Project (a14848aa059c42fb85167a354d3bbeb4) in parent group-v259933. [ 860.078913] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Creating folder: Instances. Parent ref: group-v260071. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.079122] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-352d9260-562f-4248-a121-ce7a4a5f1575 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.094398] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Created folder: Instances in parent group-v260071. [ 860.094559] env[62204]: DEBUG oslo.service.loopingcall [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.097416] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 860.097829] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74b90d6f-6a4c-4bb2-b251-f36292972797 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.123635] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 860.123635] env[62204]: value = "task-1199916" [ 860.123635] env[62204]: _type = "Task" [ 860.123635] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.132913] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199916, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.257315] env[62204]: INFO nova.compute.manager [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Took 41.88 seconds to build instance. 
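[editor's note] The nova.virt.hardware trace above (flavor limits 0:0:0, image limits 0:0:0, preferences 0:0:0, maximums 65536:65536:65536, result VirtCPUTopology(cores=1,sockets=1,threads=1) for the 1-vCPU m1.nano flavor) follows the general pattern of enumerating candidate CPU topologies for the vCPU count, filtering by the limits, and sorting by preference. The sketch below is a minimal, self-contained illustration of that selection logic only; it is not the actual nova.virt.hardware implementation, and the helper names (possible_topologies, pick_topology) are hypothetical.

    # Hypothetical sketch (not the real nova.virt.hardware code): enumerate
    # CPU topologies whose product equals the vCPU count and keep those within
    # the maximums reported in the log (65536 sockets/cores/threads).
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield every (sockets, cores, threads) split whose product equals vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield VirtCPUTopology(sockets, cores, threads)

    def pick_topology(vcpus, preferred=None):
        """Sort candidates so a preferred layout (if any) comes first; return the best."""
        candidates = list(possible_topologies(vcpus))
        if preferred:
            candidates.sort(key=lambda t: (t.sockets != preferred.sockets,
                                           t.cores != preferred.cores,
                                           t.threads != preferred.threads))
        return candidates[0]

    # For 1 vCPU with no preference, the only candidate is
    # VirtCPUTopology(sockets=1, cores=1, threads=1), matching the
    # "Sorted desired topologies" line in the log above.
    print(pick_topology(1))

[end of editor's note]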
[ 860.511823] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.514143] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c78fa2-0d85-4b35-ba39-0341c6f8603a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.524579] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e5cca3-6bc3-4386-98bd-e5196db06df5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.559592] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeba7dbc-3525-4986-80b4-b1a498e7852a tempest-ServerMetadataTestJSON-2083808139 tempest-ServerMetadataTestJSON-2083808139-project-member] Lock "60eaec9c-5dcc-4e2f-9649-78acba318a6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.434s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.561327] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba67195-53b7-4e51-afb6-c04899ac9ff3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.572533] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28341700-9676-41f6-a52a-908d881a732c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.589139] env[62204]: DEBUG nova.compute.provider_tree [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.633563] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.637971] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199916, 'name': CreateVM_Task, 'duration_secs': 0.417449} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.638287] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 860.638901] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.639206] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.639707] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 860.640132] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b54b18d7-7761-4bb5-ac10-be89b089dc1a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.645976] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 860.645976] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523f4ea9-8ac3-116d-adc0-9bb6131dcae9" [ 860.645976] env[62204]: _type = "Task" [ 860.645976] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.656959] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523f4ea9-8ac3-116d-adc0-9bb6131dcae9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.760169] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f6e0c63c-71d5-4256-a361-1013e0a513bc tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.127s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.019740] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.020088] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-619df652-fe51-46be-b955-f7ab6844c0c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.030156] env[62204]: DEBUG oslo_vmware.api [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 861.030156] env[62204]: value = "task-1199917" [ 861.030156] env[62204]: _type = "Task" [ 861.030156] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.041934] env[62204]: DEBUG oslo_vmware.api [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199917, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.094533] env[62204]: DEBUG nova.scheduler.client.report [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 861.161889] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523f4ea9-8ac3-116d-adc0-9bb6131dcae9, 'name': SearchDatastore_Task, 'duration_secs': 0.020429} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.162562] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.162950] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.164101] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.164101] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.164101] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.164101] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00af86de-2590-4286-8287-0b3a508c6993 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.175274] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.175582] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 861.176450] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9035c71-3e17-4ca1-976b-a99505fa49b1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.184608] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 861.184608] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5289c1ac-07b3-eaea-d487-d11cc5eadedb" [ 861.184608] env[62204]: _type = "Task" [ 861.184608] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.195211] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5289c1ac-07b3-eaea-d487-d11cc5eadedb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.246899] env[62204]: DEBUG nova.compute.manager [req-3bb92bfa-3ffc-4be3-888f-6a7d4bd69805 req-b8a543c5-78e7-4a2e-baca-bccfaff7b86a service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Received event network-changed-13d59ea2-8671-4e65-a3f0-5839b8e92325 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 861.248383] env[62204]: DEBUG nova.compute.manager [req-3bb92bfa-3ffc-4be3-888f-6a7d4bd69805 req-b8a543c5-78e7-4a2e-baca-bccfaff7b86a service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Refreshing instance network info cache due to event network-changed-13d59ea2-8671-4e65-a3f0-5839b8e92325. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 861.248383] env[62204]: DEBUG oslo_concurrency.lockutils [req-3bb92bfa-3ffc-4be3-888f-6a7d4bd69805 req-b8a543c5-78e7-4a2e-baca-bccfaff7b86a service nova] Acquiring lock "refresh_cache-4dc4546f-85e6-4259-9ccd-a7396669eace" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.248383] env[62204]: DEBUG oslo_concurrency.lockutils [req-3bb92bfa-3ffc-4be3-888f-6a7d4bd69805 req-b8a543c5-78e7-4a2e-baca-bccfaff7b86a service nova] Acquired lock "refresh_cache-4dc4546f-85e6-4259-9ccd-a7396669eace" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.248383] env[62204]: DEBUG nova.network.neutron [req-3bb92bfa-3ffc-4be3-888f-6a7d4bd69805 req-b8a543c5-78e7-4a2e-baca-bccfaff7b86a service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Refreshing network info cache for port 13d59ea2-8671-4e65-a3f0-5839b8e92325 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 861.262752] env[62204]: DEBUG nova.compute.manager [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 861.542060] env[62204]: DEBUG oslo_vmware.api [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199917, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.608870] env[62204]: DEBUG oslo_concurrency.lockutils [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.609s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.608870] env[62204]: INFO nova.compute.manager [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Successfully reverted task state from image_uploading on failure for instance. [ 861.609603] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.737s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.611309] env[62204]: INFO nova.compute.claims [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server [None req-69f67b3b-b2fa-460f-9b21-16aa0efac0bf tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Exception during message handling: oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-260039' has already been deleted or has not been completely created [ 861.620011] env[62204]: Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-260039' has already been deleted or has not been completely created' [ 861.620011] env[62204]: Faults: [ManagedObjectNotFound] [ 861.620011] env[62204]: Details: {'obj': 'snapshot-260039'} [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server response = request(managed_object, **kwargs) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__ [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server return client.invoke(args, kwargs) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server result = self.send(soapenv, timeout=timeout) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server return self.process_reply(reply.message, None, None) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server raise WebFault(fault, replyroot) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server suds.WebFault: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-260039' has already been deleted or has not been completely created' [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server return api_method(*args, **kwargs) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server raise exceptions.VimFaultException(fault_list, fault_string, [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.VimFaultException: The object 'vim.vm.Snapshot:snapshot-260039' has already been deleted or has not been completely created [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-260039' has already been deleted or has not been completely created' [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-260039'} [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server raise self.value [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server raise self.value [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 861.620011] env[62204]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server raise self.value [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 233, in decorated_function [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server raise self.value [ 861.624026] 
env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 230, in decorated_function [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server return function(self, context, image_id, instance, [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4442, in snapshot_instance [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server self._snapshot_instance(context, image_id, instance, [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4475, in _snapshot_instance [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server self.driver.snapshot(context, instance, image_id, [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 565, in snapshot [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server self._vmops.snapshot(context, instance, image_id, update_task_state) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1033, in snapshot [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server self._delete_vm_snapshot(instance, vm_ref, snapshot_ref) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/decorator.py", line 232, in fun [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server return caller(func, *(extras + args), **kw) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 124, in retry_if_task_in_progress [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server f(*args, **kwargs) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 933, in _delete_vm_snapshot [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server delete_snapshot_task = self._session._call_method( [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 127, in _call_method [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception() as ctxt: [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server raise self.value [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 125, in _call_method [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server return self.invoke_api(module, method, *args, **kwargs) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server return _invoke_api(module, method, *args, **kwargs) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server return evt.wait() [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server idle = self.f(*self.args, **self.kw) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api [ 861.624026] env[62204]: ERROR oslo_messaging.rpc.server raise clazz(str(excep), [ 861.625382] env[62204]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-260039' has already been deleted or has not been completely created [ 861.625382] env[62204]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-260039' has already been deleted or has not been completely created' [ 861.625382] env[62204]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 861.625382] env[62204]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-260039'} [ 861.625382] env[62204]: ERROR oslo_messaging.rpc.server [ 861.696373] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5289c1ac-07b3-eaea-d487-d11cc5eadedb, 'name': SearchDatastore_Task, 'duration_secs': 0.01358} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.698569] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a30cd38-998f-44ed-bb23-2087ef5a72d3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.704202] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 861.704202] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d3a2f2-8963-d2de-8a57-7d3b123b7b91" [ 861.704202] env[62204]: _type = "Task" [ 861.704202] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.713760] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d3a2f2-8963-d2de-8a57-7d3b123b7b91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.722618] env[62204]: DEBUG oslo_concurrency.lockutils [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.723078] env[62204]: DEBUG oslo_concurrency.lockutils [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.792464] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.049195] env[62204]: DEBUG oslo_vmware.api [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1199917, 'name': PowerOnVM_Task, 'duration_secs': 0.897515} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.049632] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.049891] env[62204]: DEBUG nova.compute.manager [None req-ee186c82-4d97-4ae5-b144-ab2a236c4000 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 862.050762] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a716c9-c80a-4a89-bb00-1e3dfa7c33fc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.219092] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d3a2f2-8963-d2de-8a57-7d3b123b7b91, 'name': SearchDatastore_Task, 'duration_secs': 0.021977} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.223022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.223022] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 0a720922-60ea-4b31-ba56-cdcbba1ab629/0a720922-60ea-4b31-ba56-cdcbba1ab629.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.223022] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c372307f-bb2d-424a-8294-501778465d78 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.226273] env[62204]: INFO nova.compute.manager [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Detaching volume 36c27753-d664-470d-98bd-effeeada2008 [ 862.233310] env[62204]: DEBUG nova.network.neutron [req-3bb92bfa-3ffc-4be3-888f-6a7d4bd69805 req-b8a543c5-78e7-4a2e-baca-bccfaff7b86a service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Updated VIF entry in instance network info cache for port 13d59ea2-8671-4e65-a3f0-5839b8e92325. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 862.233310] env[62204]: DEBUG nova.network.neutron [req-3bb92bfa-3ffc-4be3-888f-6a7d4bd69805 req-b8a543c5-78e7-4a2e-baca-bccfaff7b86a service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Updating instance_info_cache with network_info: [{"id": "13d59ea2-8671-4e65-a3f0-5839b8e92325", "address": "fa:16:3e:af:18:3c", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13d59ea2-86", "ovs_interfaceid": "13d59ea2-8671-4e65-a3f0-5839b8e92325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.233310] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 862.233310] env[62204]: value = "task-1199918" [ 862.233310] env[62204]: _type = "Task" [ 862.233310] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.245773] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199918, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.284820] env[62204]: INFO nova.virt.block_device [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Attempting to driver detach volume 36c27753-d664-470d-98bd-effeeada2008 from mountpoint /dev/sdb [ 862.285125] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Volume detach. 
Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 862.285339] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260038', 'volume_id': '36c27753-d664-470d-98bd-effeeada2008', 'name': 'volume-36c27753-d664-470d-98bd-effeeada2008', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4793e9fd-be87-4885-8f0e-1fcef6ce4d2f', 'attached_at': '', 'detached_at': '', 'volume_id': '36c27753-d664-470d-98bd-effeeada2008', 'serial': '36c27753-d664-470d-98bd-effeeada2008'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 862.286699] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a3ada5-76e7-4c24-9463-416e33bf2a32 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.313450] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00458914-adec-4d61-9fc0-1af49bf06187 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.321514] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3818a9b-1e1e-44ce-964f-6f664db4131d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.345277] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ebeb80-ff9b-4499-8443-da932357a1db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.366162] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] The volume has not been displaced from its original location: [datastore2] volume-36c27753-d664-470d-98bd-effeeada2008/volume-36c27753-d664-470d-98bd-effeeada2008.vmdk. No consolidation needed. 
{{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 862.371355] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Reconfiguring VM instance instance-00000036 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 862.371790] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3483c8bf-ccd3-4a5b-ba61-f83ea718c29a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.392018] env[62204]: DEBUG oslo_vmware.api [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 862.392018] env[62204]: value = "task-1199919" [ 862.392018] env[62204]: _type = "Task" [ 862.392018] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.401655] env[62204]: DEBUG oslo_vmware.api [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199919, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.742025] env[62204]: DEBUG oslo_concurrency.lockutils [req-3bb92bfa-3ffc-4be3-888f-6a7d4bd69805 req-b8a543c5-78e7-4a2e-baca-bccfaff7b86a service nova] Releasing lock "refresh_cache-4dc4546f-85e6-4259-9ccd-a7396669eace" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.752514] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199918, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.903617] env[62204]: DEBUG oslo_vmware.api [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199919, 'name': ReconfigVM_Task, 'duration_secs': 0.311565} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.904043] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Reconfigured VM instance instance-00000036 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 862.911901] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7e20a76-c95e-4625-9fa2-1838be203045 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.930716] env[62204]: DEBUG oslo_vmware.api [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 862.930716] env[62204]: value = "task-1199920" [ 862.930716] env[62204]: _type = "Task" [ 862.930716] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.947895] env[62204]: DEBUG oslo_vmware.api [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199920, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.167104] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57c92a3-60ae-4fa9-b6e9-acf6c71fc4eb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.180973] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e93ba2-0144-40c4-81b0-c906b40efbb8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.218180] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56ab61c-ec61-4266-b348-5d48598b049e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.229922] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5ce55c-c3e4-4886-9fa6-056ff9391e38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.253686] env[62204]: DEBUG nova.compute.provider_tree [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.262189] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199918, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560771} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.262573] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 0a720922-60ea-4b31-ba56-cdcbba1ab629/0a720922-60ea-4b31-ba56-cdcbba1ab629.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.262711] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.263818] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd93cef1-7d63-40a1-b244-3067bed42c5d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.274118] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 863.274118] env[62204]: value = "task-1199921" [ 863.274118] env[62204]: _type = "Task" [ 863.274118] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.284348] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199921, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.442230] env[62204]: DEBUG oslo_vmware.api [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199920, 'name': ReconfigVM_Task, 'duration_secs': 0.193266} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.442590] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260038', 'volume_id': '36c27753-d664-470d-98bd-effeeada2008', 'name': 'volume-36c27753-d664-470d-98bd-effeeada2008', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4793e9fd-be87-4885-8f0e-1fcef6ce4d2f', 'attached_at': '', 'detached_at': '', 'volume_id': '36c27753-d664-470d-98bd-effeeada2008', 'serial': '36c27753-d664-470d-98bd-effeeada2008'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 863.757468] env[62204]: DEBUG nova.scheduler.client.report [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 863.784519] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199921, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10745} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.784685] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 863.785483] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008ea6c8-b4e8-428d-aadc-e8551d389c90 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.806916] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 0a720922-60ea-4b31-ba56-cdcbba1ab629/0a720922-60ea-4b31-ba56-cdcbba1ab629.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 863.807636] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a00365c-ad75-4b45-968f-1046e0e544be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.828555] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 863.828555] env[62204]: value = "task-1199922" [ 863.828555] env[62204]: _type = "Task" [ 863.828555] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.838046] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199922, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.986926] env[62204]: DEBUG nova.objects.instance [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 864.262278] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.262905] env[62204]: DEBUG nova.compute.manager [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 864.266031] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.851s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.266123] env[62204]: DEBUG nova.objects.instance [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lazy-loading 'resources' on Instance uuid dba1edda-edfd-4a97-ab95-48f3f5a933f8 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 864.341509] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199922, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.772738] env[62204]: DEBUG nova.compute.utils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.775038] env[62204]: DEBUG nova.compute.manager [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 864.775266] env[62204]: DEBUG nova.network.neutron [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 864.842787] env[62204]: DEBUG nova.policy [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6054f141cad7421f85bbb5944f408070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6907df6f17b142c0b4881f15f3b88a9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 864.851077] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199922, 'name': ReconfigVM_Task, 'duration_secs': 0.570326} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.851388] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 0a720922-60ea-4b31-ba56-cdcbba1ab629/0a720922-60ea-4b31-ba56-cdcbba1ab629.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.854281] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df969b90-4325-4990-ad72-d3674c2eadde {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.862851] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 864.862851] env[62204]: value = "task-1199923" [ 864.862851] env[62204]: _type = "Task" [ 864.862851] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.877247] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199923, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.960827] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.996453] env[62204]: DEBUG oslo_concurrency.lockutils [None req-506e01da-d257-4b27-9b83-5c17d69afacb tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.273s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.997700] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.037s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.998329] env[62204]: DEBUG nova.compute.manager [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 864.999037] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc2ff0a-d2cb-4519-986c-c4f0b1373d02 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.008649] env[62204]: DEBUG nova.compute.manager [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62204) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 865.009402] env[62204]: DEBUG nova.objects.instance [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 865.192008] env[62204]: DEBUG nova.network.neutron [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Successfully created port: 5d075538-3e94-4457-b88a-3dcde88f99db {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.220878] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025911e1-b2a5-4de9-98ae-f3ce6d0e47f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.229441] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992c9211-486b-4a7e-a278-4a1f2a4dde63 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.261921] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0e32ce-0a47-44b3-b242-73aaf994ebf4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.271631] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04dc8870-3fb5-45ab-9191-db112a6644b1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.278352] env[62204]: DEBUG nova.compute.manager [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 865.291090] env[62204]: DEBUG nova.compute.provider_tree [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.372943] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199923, 'name': Rename_Task, 'duration_secs': 0.228102} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.373256] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.373538] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f6f53b57-9b95-41be-a6aa-70cf3b2139bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.381570] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 865.381570] env[62204]: value = "task-1199924" [ 865.381570] env[62204]: _type = "Task" [ 865.381570] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.392892] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199924, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.517879] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 865.518200] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36ce0241-4380-4510-bcd6-defd9b23967e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.526165] env[62204]: DEBUG oslo_vmware.api [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 865.526165] env[62204]: value = "task-1199925" [ 865.526165] env[62204]: _type = "Task" [ 865.526165] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.536506] env[62204]: DEBUG oslo_vmware.api [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199925, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.794861] env[62204]: DEBUG nova.scheduler.client.report [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 865.893601] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199924, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.036887] env[62204]: DEBUG oslo_vmware.api [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199925, 'name': PowerOffVM_Task, 'duration_secs': 0.285783} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.037317] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 866.037546] env[62204]: DEBUG nova.compute.manager [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 866.038409] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b377ff96-6658-4713-a87a-4165a09ae7a0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.290746] env[62204]: DEBUG nova.compute.manager [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 866.300338] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.034s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.302605] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.642s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.303056] env[62204]: DEBUG nova.objects.instance [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lazy-loading 'resources' on Instance uuid 2c393123-87de-460a-965d-43473478a79f {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 866.314181] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 866.314460] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 866.314625] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 866.314809] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 866.314962] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 866.315292] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 866.315530] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 866.315697] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 866.315872] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 866.316084] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 866.316279] env[62204]: DEBUG nova.virt.hardware [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 866.317427] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08511207-da07-40a0-ad6e-e625973eeca1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.322591] env[62204]: INFO nova.scheduler.client.report [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleted allocations for instance dba1edda-edfd-4a97-ab95-48f3f5a933f8 [ 866.330031] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a888b4b-6a07-4769-b239-f2cc6f9679d5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.393316] env[62204]: DEBUG oslo_vmware.api [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199924, 'name': PowerOnVM_Task, 'duration_secs': 0.653751} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.393636] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.393854] env[62204]: INFO nova.compute.manager [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Took 6.41 seconds to spawn the instance on the hypervisor. [ 866.394075] env[62204]: DEBUG nova.compute.manager [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 866.394993] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8c7770-ff20-411b-8ce2-35f731f190a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.552601] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a8fbd508-18cb-4d60-934f-78b87948e9df tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.555s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.832703] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cfa3fb2d-bece-44f0-8e78-09d0fa73e07b tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "dba1edda-edfd-4a97-ab95-48f3f5a933f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.092s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.872341] env[62204]: DEBUG nova.compute.manager [req-9b89ac27-8e50-4784-976d-71efd6ded89a req-13f29a4f-fecc-4293-b0e3-80931444ad6c service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Received event network-vif-plugged-5d075538-3e94-4457-b88a-3dcde88f99db {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 866.872580] env[62204]: DEBUG oslo_concurrency.lockutils [req-9b89ac27-8e50-4784-976d-71efd6ded89a req-13f29a4f-fecc-4293-b0e3-80931444ad6c service nova] Acquiring lock "62605b48-e640-4b4d-ab77-1ed44a75daa3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.872834] env[62204]: DEBUG oslo_concurrency.lockutils [req-9b89ac27-8e50-4784-976d-71efd6ded89a req-13f29a4f-fecc-4293-b0e3-80931444ad6c service nova] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.873189] env[62204]: DEBUG oslo_concurrency.lockutils [req-9b89ac27-8e50-4784-976d-71efd6ded89a req-13f29a4f-fecc-4293-b0e3-80931444ad6c 
service nova] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.873433] env[62204]: DEBUG nova.compute.manager [req-9b89ac27-8e50-4784-976d-71efd6ded89a req-13f29a4f-fecc-4293-b0e3-80931444ad6c service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] No waiting events found dispatching network-vif-plugged-5d075538-3e94-4457-b88a-3dcde88f99db {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 866.873617] env[62204]: WARNING nova.compute.manager [req-9b89ac27-8e50-4784-976d-71efd6ded89a req-13f29a4f-fecc-4293-b0e3-80931444ad6c service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Received unexpected event network-vif-plugged-5d075538-3e94-4457-b88a-3dcde88f99db for instance with vm_state building and task_state spawning. [ 866.914728] env[62204]: INFO nova.compute.manager [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Took 42.71 seconds to build instance. [ 867.249795] env[62204]: DEBUG nova.network.neutron [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Successfully updated port: 5d075538-3e94-4457-b88a-3dcde88f99db {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.273729] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6141939d-be86-4c56-9ce5-cef9b5a3f829 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.284219] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54100690-8b69-45ac-9086-520ae398e009 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.319817] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cde02e1-42cf-485f-867f-3c738685177b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.330267] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6212691a-d90d-49d9-8119-8b2439a3ba45 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.348411] env[62204]: DEBUG nova.compute.provider_tree [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.416224] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a0b6fdf0-b460-49eb-a002-b0317782f887 tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "0a720922-60ea-4b31-ba56-cdcbba1ab629" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.533s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.484083] env[62204]: DEBUG nova.objects.instance [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 867.593241] env[62204]: INFO nova.compute.manager [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Rebuilding instance [ 867.632018] env[62204]: DEBUG nova.compute.manager [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.633257] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d833f00-e0ed-4a4b-a192-b4cb94e806c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.753407] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-62605b48-e640-4b4d-ab77-1ed44a75daa3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.753595] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-62605b48-e640-4b4d-ab77-1ed44a75daa3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.753751] env[62204]: DEBUG nova.network.neutron [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 867.851805] env[62204]: DEBUG nova.scheduler.client.report [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 867.990189] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 
867.990439] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.990685] env[62204]: DEBUG nova.network.neutron [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 867.990879] env[62204]: DEBUG nova.objects.instance [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'info_cache' on Instance uuid 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 868.147539] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.147834] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c06c560-8518-408f-976d-744faa1afd21 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.156494] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 868.156494] env[62204]: value = "task-1199926" [ 868.156494] env[62204]: _type = "Task" [ 868.156494] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.166982] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199926, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.289778] env[62204]: DEBUG nova.network.neutron [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 868.357329] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.054s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 868.360771] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.765s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 868.362637] env[62204]: INFO nova.compute.claims [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.388281] env[62204]: INFO nova.scheduler.client.report [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted allocations for instance 2c393123-87de-460a-965d-43473478a79f [ 868.494227] env[62204]: DEBUG nova.objects.base [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Object Instance<4793e9fd-be87-4885-8f0e-1fcef6ce4d2f> lazy-loaded attributes: flavor,info_cache {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 868.564575] env[62204]: DEBUG nova.network.neutron [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Updating instance_info_cache with network_info: [{"id": "5d075538-3e94-4457-b88a-3dcde88f99db", "address": "fa:16:3e:d2:73:3a", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d075538-3e", "ovs_interfaceid": "5d075538-3e94-4457-b88a-3dcde88f99db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.667672] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d 
tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199926, 'name': PowerOffVM_Task, 'duration_secs': 0.215509} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.668129] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 868.668453] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.669341] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7f1579-c478-42f4-8e32-1ee91ecc4f99 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.679586] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 868.679887] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a05943ab-36fe-487a-a83a-7d5e28c72c0a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.710713] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 868.710713] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 868.710970] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Deleting the datastore file [datastore2] 0a720922-60ea-4b31-ba56-cdcbba1ab629 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 868.711928] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b448fcb2-2db6-4a31-9f93-8183194130c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.720768] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 868.720768] env[62204]: value = "task-1199928" [ 868.720768] env[62204]: _type = "Task" [ 868.720768] env[62204]: 
} to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.733939] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199928, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.903422] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4084995b-91d5-44e2-ab6c-e16e7c322561 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2c393123-87de-460a-965d-43473478a79f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.402s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.067503] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-62605b48-e640-4b4d-ab77-1ed44a75daa3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.070129] env[62204]: DEBUG nova.compute.manager [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Instance network_info: |[{"id": "5d075538-3e94-4457-b88a-3dcde88f99db", "address": "fa:16:3e:d2:73:3a", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d075538-3e", "ovs_interfaceid": "5d075538-3e94-4457-b88a-3dcde88f99db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 869.070129] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:73:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d075538-3e94-4457-b88a-3dcde88f99db', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.077099] env[62204]: DEBUG 
oslo.service.loopingcall [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.079122] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.081403] env[62204]: DEBUG nova.compute.manager [req-c1cff672-cb35-41c1-af6a-07ccce1fc9e0 req-64b95c27-f605-4061-8438-a4b69fa89915 service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Received event network-changed-5d075538-3e94-4457-b88a-3dcde88f99db {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.083090] env[62204]: DEBUG nova.compute.manager [req-c1cff672-cb35-41c1-af6a-07ccce1fc9e0 req-64b95c27-f605-4061-8438-a4b69fa89915 service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Refreshing instance network info cache due to event network-changed-5d075538-3e94-4457-b88a-3dcde88f99db. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 869.083090] env[62204]: DEBUG oslo_concurrency.lockutils [req-c1cff672-cb35-41c1-af6a-07ccce1fc9e0 req-64b95c27-f605-4061-8438-a4b69fa89915 service nova] Acquiring lock "refresh_cache-62605b48-e640-4b4d-ab77-1ed44a75daa3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.083090] env[62204]: DEBUG oslo_concurrency.lockutils [req-c1cff672-cb35-41c1-af6a-07ccce1fc9e0 req-64b95c27-f605-4061-8438-a4b69fa89915 service nova] Acquired lock "refresh_cache-62605b48-e640-4b4d-ab77-1ed44a75daa3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.083090] env[62204]: DEBUG nova.network.neutron [req-c1cff672-cb35-41c1-af6a-07ccce1fc9e0 req-64b95c27-f605-4061-8438-a4b69fa89915 service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Refreshing network info cache for port 5d075538-3e94-4457-b88a-3dcde88f99db {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 869.084176] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d6c5e73-bff3-4ded-baf6-da2255ad761f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.114450] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.114450] env[62204]: value = "task-1199929" [ 869.114450] env[62204]: _type = "Task" [ 869.114450] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.126254] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199929, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.233428] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199928, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.246335} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.238602] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.238933] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.239207] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.362991] env[62204]: DEBUG nova.network.neutron [req-c1cff672-cb35-41c1-af6a-07ccce1fc9e0 req-64b95c27-f605-4061-8438-a4b69fa89915 service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Updated VIF entry in instance network info cache for port 5d075538-3e94-4457-b88a-3dcde88f99db. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 869.363462] env[62204]: DEBUG nova.network.neutron [req-c1cff672-cb35-41c1-af6a-07ccce1fc9e0 req-64b95c27-f605-4061-8438-a4b69fa89915 service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Updating instance_info_cache with network_info: [{"id": "5d075538-3e94-4457-b88a-3dcde88f99db", "address": "fa:16:3e:d2:73:3a", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d075538-3e", "ovs_interfaceid": "5d075538-3e94-4457-b88a-3dcde88f99db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.374530] env[62204]: DEBUG nova.network.neutron [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Updating instance_info_cache with network_info: [{"id": "ac345dde-4672-4c9d-a224-24ebc7900628", "address": "fa:16:3e:41:0f:f2", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac345dde-46", "ovs_interfaceid": "ac345dde-4672-4c9d-a224-24ebc7900628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.628019] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199929, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.765587] env[62204]: DEBUG oslo_vmware.rw_handles [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52081a61-083a-4696-3690-5c32ee6369bc/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 869.766809] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de56d2c8-e527-42ff-8903-77a1f7420e96 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.776981] env[62204]: DEBUG oslo_vmware.rw_handles [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52081a61-083a-4696-3690-5c32ee6369bc/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 869.777227] env[62204]: ERROR oslo_vmware.rw_handles [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52081a61-083a-4696-3690-5c32ee6369bc/disk-0.vmdk due to incomplete transfer. 
[ 869.779057] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3a28c8bb-3f31-4c21-92a1-d1540e76fb3b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.781097] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f211e1b-0d94-4e63-8941-412ed05e22d7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.789828] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6cc923c-c1be-476d-81de-1413fbb7ff72 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.794193] env[62204]: DEBUG oslo_vmware.rw_handles [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52081a61-083a-4696-3690-5c32ee6369bc/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 869.794424] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Uploaded image 756c1996-a6d4-4dc1-b787-af7e2b1a2b97 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 869.796630] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 869.797334] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-577731f3-11a5-454c-813d-7e8e3ce530d3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.835919] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87324a47-6ad3-43e0-8d7e-60411bf207b5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.838957] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 869.838957] env[62204]: value = "task-1199930" [ 869.838957] env[62204]: _type = "Task" [ 869.838957] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.846537] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a579b366-d4aa-4844-850b-3d25237316ca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.854395] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199930, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.865325] env[62204]: DEBUG nova.compute.provider_tree [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.867877] env[62204]: DEBUG oslo_concurrency.lockutils [req-c1cff672-cb35-41c1-af6a-07ccce1fc9e0 req-64b95c27-f605-4061-8438-a4b69fa89915 service nova] Releasing lock "refresh_cache-62605b48-e640-4b4d-ab77-1ed44a75daa3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.877824] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Releasing lock "refresh_cache-4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.127372] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199929, 'name': CreateVM_Task, 'duration_secs': 0.534621} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.127666] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 870.128521] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.128700] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.129046] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 870.129331] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76451d89-ebf3-42bf-9c5e-970361aa1212 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.134750] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 870.134750] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5255b2bf-60a0-391f-5bf9-80cb5c86f3ac" [ 870.134750] env[62204]: _type = "Task" [ 870.134750] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.143433] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5255b2bf-60a0-391f-5bf9-80cb5c86f3ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.271649] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 870.271919] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 870.272128] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.272321] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 870.272435] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.272591] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 870.272808] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 870.272973] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 870.273161] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d 
tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 870.273333] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 870.273512] env[62204]: DEBUG nova.virt.hardware [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 870.274412] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45c93f1-de47-444f-904c-0f1b22620a05 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.283202] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ff149c-6d23-419f-9d83-4f3f1d79c437 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.296634] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 870.302164] env[62204]: DEBUG oslo.service.loopingcall [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.302399] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 870.302610] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8a6aa8f-7ca7-4f34-b2a2-ca643d24f9c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.319040] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 870.319040] env[62204]: value = "task-1199931" [ 870.319040] env[62204]: _type = "Task" [ 870.319040] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.326848] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199931, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.352024] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199930, 'name': Destroy_Task, 'duration_secs': 0.361468} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.352024] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Destroyed the VM [ 870.352024] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 870.352024] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c05d9b01-45ce-4930-8dd7-fad1f6f028b9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.356954] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 870.356954] env[62204]: value = "task-1199932" [ 870.356954] env[62204]: _type = "Task" [ 870.356954] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.365760] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199932, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.369846] env[62204]: DEBUG nova.scheduler.client.report [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 870.383042] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.383042] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-914934d1-0b13-4d82-9e6b-ced7d180956b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.389585] env[62204]: DEBUG oslo_vmware.api [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 870.389585] env[62204]: value = "task-1199933" [ 870.389585] env[62204]: _type = "Task" [ 
870.389585] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.401659] env[62204]: DEBUG oslo_vmware.api [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199933, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.648438] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5255b2bf-60a0-391f-5bf9-80cb5c86f3ac, 'name': SearchDatastore_Task, 'duration_secs': 0.013646} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.648681] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.648871] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.649311] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.649493] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.649688] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.650023] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-baee0c72-7abc-41ec-b320-c4c7af1bbf98 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.660936] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.661172] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.661948] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa43f5ce-8795-418e-b0fe-b60e73f658a7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.668948] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 870.668948] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528815d2-447a-4059-c6c0-60e638984af3" [ 870.668948] env[62204]: _type = "Task" [ 870.668948] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.678861] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528815d2-447a-4059-c6c0-60e638984af3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.830280] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199931, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.868243] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199932, 'name': RemoveSnapshot_Task} progress is 16%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.878022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.878022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.472s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.878800] env[62204]: INFO nova.compute.claims [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 870.902729] env[62204]: DEBUG oslo_vmware.api [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1199933, 'name': PowerOnVM_Task, 'duration_secs': 0.458906} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.903380] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.903909] env[62204]: DEBUG nova.compute.manager [None req-2ac69ca5-2dcd-4b17-a0b4-fc2dcb64ddcf tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 870.905138] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4e75af-a8da-496d-ae1b-99d4f0bb947b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.183308] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528815d2-447a-4059-c6c0-60e638984af3, 'name': SearchDatastore_Task, 'duration_secs': 0.012195} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.183620] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-feb7a1a8-fc10-4867-82f8-06c0263a43cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.190683] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 871.190683] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a79681-1e3d-a75d-3344-ac5ea1199cbb" [ 871.190683] env[62204]: _type = "Task" [ 871.190683] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.201023] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a79681-1e3d-a75d-3344-ac5ea1199cbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.330639] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199931, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.367621] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199932, 'name': RemoveSnapshot_Task} progress is 70%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.386549] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquiring lock "74740d21-0eb7-435b-9957-1a9f26693771" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.386549] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "74740d21-0eb7-435b-9957-1a9f26693771" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.702621] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a79681-1e3d-a75d-3344-ac5ea1199cbb, 'name': SearchDatastore_Task, 'duration_secs': 0.011274} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.703036] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.703399] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 62605b48-e640-4b4d-ab77-1ed44a75daa3/62605b48-e640-4b4d-ab77-1ed44a75daa3.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.703745] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7687d8e7-c00a-43b1-a00e-39e3eb579128 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.710435] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 871.710435] env[62204]: value = "task-1199934" [ 871.710435] env[62204]: _type = "Task" [ 871.710435] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.719562] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199934, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.830925] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199931, 'name': CreateVM_Task, 'duration_secs': 1.395885} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.831172] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.831699] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.831896] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.832298] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 871.832589] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25958840-bcc3-4ad9-b24d-6d075a51d4bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.838624] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 871.838624] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524e43c5-bd43-e714-3fa9-e219ef42c1c8" [ 871.838624] env[62204]: _type = "Task" [ 871.838624] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.847889] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524e43c5-bd43-e714-3fa9-e219ef42c1c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.867232] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199932, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.891702] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "74740d21-0eb7-435b-9957-1a9f26693771" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.892228] env[62204]: DEBUG nova.compute.manager [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 872.221283] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471758} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.221606] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 62605b48-e640-4b4d-ab77-1ed44a75daa3/62605b48-e640-4b4d-ab77-1ed44a75daa3.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.221782] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.222045] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8049095-4680-490f-9626-e95f6d693c4c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.230700] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 872.230700] env[62204]: value = "task-1199935" [ 872.230700] env[62204]: _type = "Task" [ 872.230700] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.239471] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199935, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.296899] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d75a2a3-e155-4a7b-a949-0d32a31fff77 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.304772] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cefef6-ec46-45e3-ae66-280dc7dd84d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.334713] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b1bf8c-edd0-4aaf-a198-b24595cdba92 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.345371] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cc8ebb-fb4e-4919-a37a-407f0885e2d6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.354446] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524e43c5-bd43-e714-3fa9-e219ef42c1c8, 'name': SearchDatastore_Task, 'duration_secs': 0.010296} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.363430] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.363714] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.363989] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 872.364152] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.364335] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.364845] env[62204]: DEBUG nova.compute.provider_tree [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.366024] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73d14dc4-d925-4908-972f-13cfaff7c0db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.378371] env[62204]: DEBUG oslo_vmware.api [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199932, 'name': RemoveSnapshot_Task, 'duration_secs': 1.521287} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.379505] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 872.379762] env[62204]: INFO nova.compute.manager [None req-02938b9a-f3c7-40c4-80a3-8c89856213e8 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Took 17.78 seconds to snapshot the instance on the hypervisor. [ 872.382076] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.382295] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.383489] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5d7f294-99d3-410f-807b-828123d898a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.389835] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 872.389835] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e40c4a-d179-4d87-cb0d-dcd38f7141f5" [ 872.389835] env[62204]: _type = "Task" [ 872.389835] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.397376] env[62204]: DEBUG nova.compute.utils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 872.402230] env[62204]: DEBUG nova.compute.manager [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 872.402444] env[62204]: DEBUG nova.network.neutron [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 872.405792] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e40c4a-d179-4d87-cb0d-dcd38f7141f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.473329] env[62204]: DEBUG nova.policy [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4a705501bdc4c119ca58cc23480deb9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c275cfd5c1046e087bc7e1a3dc0c10c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 872.742118] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199935, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.328471} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.742421] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.743365] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cd7ca4-6d0c-48e3-9c9d-bd5abd5628c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.766214] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 62605b48-e640-4b4d-ab77-1ed44a75daa3/62605b48-e640-4b4d-ab77-1ed44a75daa3.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.767032] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e716d9f4-fede-4539-a263-62747335ac3e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.787985] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 872.787985] env[62204]: value = "task-1199936" [ 872.787985] env[62204]: _type = "Task" [ 872.787985] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.796459] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199936, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.861689] env[62204]: DEBUG nova.network.neutron [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Successfully created port: 598f0c6c-7d5c-4ecf-bbde-94d1236e560c {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.872532] env[62204]: DEBUG nova.scheduler.client.report [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 872.901579] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e40c4a-d179-4d87-cb0d-dcd38f7141f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010373} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.902463] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3872f26c-8c8e-48ee-8929-9a036118d2bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.907059] env[62204]: DEBUG nova.compute.manager [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 872.912101] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 872.912101] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5203de99-d776-e49e-afd3-7dbeed0a2794" [ 872.912101] env[62204]: _type = "Task" [ 872.912101] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.923876] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5203de99-d776-e49e-afd3-7dbeed0a2794, 'name': SearchDatastore_Task, 'duration_secs': 0.012112} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.924518] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.924830] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 0a720922-60ea-4b31-ba56-cdcbba1ab629/0a720922-60ea-4b31-ba56-cdcbba1ab629.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 872.925150] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dae25dc-44f6-447d-9680-5bbc42133eae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.934089] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 872.934089] env[62204]: value = "task-1199937" [ 872.934089] env[62204]: _type = "Task" [ 872.934089] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.944031] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199937, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.300055] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199936, 'name': ReconfigVM_Task, 'duration_secs': 0.308107} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.300396] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 62605b48-e640-4b4d-ab77-1ed44a75daa3/62605b48-e640-4b4d-ab77-1ed44a75daa3.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.301090] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab15f354-f3f8-413b-ab0c-c735ad29bd52 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.309425] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 873.309425] env[62204]: value = "task-1199938" [ 873.309425] env[62204]: _type = "Task" [ 873.309425] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.320774] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199938, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.379232] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.502s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.379802] env[62204]: DEBUG nova.compute.manager [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 873.382549] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.859s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.382788] env[62204]: DEBUG nova.objects.instance [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lazy-loading 'resources' on Instance uuid 25563dec-7e4d-42d9-b922-0b2354b5d70e {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.445795] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199937, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.820063] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199938, 'name': Rename_Task, 'duration_secs': 0.245656} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.820236] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.820462] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87f9c9bb-4089-4c39-8db9-ab30db2b10b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.827197] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 873.827197] env[62204]: value = "task-1199939" [ 873.827197] env[62204]: _type = "Task" [ 873.827197] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.835163] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199939, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.889034] env[62204]: DEBUG nova.compute.utils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 873.890648] env[62204]: DEBUG nova.compute.manager [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 873.890817] env[62204]: DEBUG nova.network.neutron [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 873.915896] env[62204]: DEBUG nova.compute.manager [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 873.935716] env[62204]: DEBUG nova.policy [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a2edea246e74173bbdb4365d0309cd7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be5f3f8b28ab4b63a2621b1fe1383af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 873.944993] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 873.945268] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 873.945431] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.945615] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 873.945819] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.945966] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 873.946226] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 
tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 873.946401] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 873.946570] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 873.946736] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 873.947414] env[62204]: DEBUG nova.virt.hardware [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 873.947758] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee62d79d-8f14-4ad3-a3f2-71849f662758 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.955616] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512627} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.956751] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 0a720922-60ea-4b31-ba56-cdcbba1ab629/0a720922-60ea-4b31-ba56-cdcbba1ab629.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.956751] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.956957] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7bdd3da8-c446-49a5-b3d8-5a5deef51952 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.962227] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad6ae19-f871-4471-a81a-85965b8e0d14 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.970893] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 873.970893] env[62204]: value = "task-1199940" [ 873.970893] env[62204]: _type = "Task" [ 873.970893] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.992916] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199940, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.224946] env[62204]: DEBUG nova.compute.manager [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 874.226468] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b402cd-357f-4a88-96c7-0d07ec0543c7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.229938] env[62204]: DEBUG nova.network.neutron [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Successfully created port: 830a7992-393b-4d36-82d8-b660d6904ae7 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 874.324869] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d48928d-6c15-4c37-be83-dbba9959fc31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.337017] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199939, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.339755] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358fa2cd-0aff-4811-a90a-f8af198c7231 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.376454] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94296e80-1206-4b89-8c44-a8bd0726acc4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.385255] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431b4457-ab80-4581-b13c-4c9b352d2c4c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.400281] env[62204]: DEBUG nova.compute.manager [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 874.403140] env[62204]: DEBUG nova.compute.provider_tree [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.443330] env[62204]: DEBUG nova.compute.manager [req-490634b5-0961-4314-8e8c-44f8cdcdaefd req-57ce90d3-ad3b-4451-85a0-ac42203d080e service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Received event network-vif-plugged-598f0c6c-7d5c-4ecf-bbde-94d1236e560c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 874.443330] env[62204]: DEBUG oslo_concurrency.lockutils [req-490634b5-0961-4314-8e8c-44f8cdcdaefd req-57ce90d3-ad3b-4451-85a0-ac42203d080e service nova] Acquiring lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 874.443330] env[62204]: DEBUG oslo_concurrency.lockutils [req-490634b5-0961-4314-8e8c-44f8cdcdaefd req-57ce90d3-ad3b-4451-85a0-ac42203d080e service nova] Lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.443330] env[62204]: DEBUG oslo_concurrency.lockutils [req-490634b5-0961-4314-8e8c-44f8cdcdaefd req-57ce90d3-ad3b-4451-85a0-ac42203d080e service nova] Lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.444406] env[62204]: DEBUG nova.compute.manager [req-490634b5-0961-4314-8e8c-44f8cdcdaefd req-57ce90d3-ad3b-4451-85a0-ac42203d080e service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] No waiting events found dispatching network-vif-plugged-598f0c6c-7d5c-4ecf-bbde-94d1236e560c {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 874.444406] env[62204]: WARNING nova.compute.manager [req-490634b5-0961-4314-8e8c-44f8cdcdaefd req-57ce90d3-ad3b-4451-85a0-ac42203d080e service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Received unexpected event network-vif-plugged-598f0c6c-7d5c-4ecf-bbde-94d1236e560c for instance with vm_state building and task_state spawning. [ 874.481405] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199940, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.218702} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.481749] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.482718] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d20729-9083-4ce8-8b55-c8864aa1ed66 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.507510] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 0a720922-60ea-4b31-ba56-cdcbba1ab629/0a720922-60ea-4b31-ba56-cdcbba1ab629.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.507818] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46307214-a8f9-40db-9091-5848a4e7e0fe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.530543] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 874.530543] env[62204]: value = "task-1199941" [ 874.530543] env[62204]: _type = "Task" [ 874.530543] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.539537] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199941, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.581566] env[62204]: DEBUG nova.network.neutron [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Successfully updated port: 598f0c6c-7d5c-4ecf-bbde-94d1236e560c {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.745323] env[62204]: INFO nova.compute.manager [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] instance snapshotting [ 874.748763] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603d736b-8aec-43f6-8e77-4f6c7e9891db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.770287] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a1c929-9661-4d0c-b0a0-d8c86c5cfa41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.838619] env[62204]: DEBUG oslo_vmware.api [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199939, 'name': PowerOnVM_Task, 'duration_secs': 0.863436} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.838973] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 874.839222] env[62204]: INFO nova.compute.manager [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Took 8.55 seconds to spawn the instance on the hypervisor. 
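The ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same wait_for_task / _poll_task pattern: a vCenter task is submitted, then repeatedly polled while its "progress is N%" is logged until it completes (or fails). The snippet below is a minimal, self-contained sketch of that control flow only; it is not the oslo.vmware implementation, and every name in it (get_task_info, TaskFailed, poll_interval) is a hypothetical stand-in used purely for illustration.

    # Sketch of the poll-until-complete loop behind the wait_for_task log lines.
    # All helper names here are hypothetical, not oslo.vmware APIs.
    import time

    class TaskFailed(Exception):
        """Raised when the polled task ends in an error state (illustrative only)."""

    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        """Poll get_task_info(task_id) until the task succeeds or fails.

        get_task_info is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 88} or {'state': 'success'}.
        """
        while True:
            info = get_task_info(task_id)
            state = info.get("state")
            if state == "success":
                return info
            if state == "error":
                raise TaskFailed(info.get("error", "task %s failed" % task_id))
            # Mirrors log lines like "Task: {'id': task-1199939, ...} progress is 88%."
            print("Task %s progress is %s%%." % (task_id, info.get("progress", 0)))
            time.sleep(poll_interval)

A caller would pass whatever function retrieves the task state, e.g. wait_for_task(vcenter_lookup, "task-1199939"), where vcenter_lookup is an assumed helper that fetches the TaskInfo for that id.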
[ 874.839440] env[62204]: DEBUG nova.compute.manager [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 874.840300] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ab6e51-4a3b-4dad-8cb6-f50a79742f28 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.910022] env[62204]: DEBUG nova.scheduler.client.report [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.040817] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199941, 'name': ReconfigVM_Task, 'duration_secs': 0.401709} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.041120] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 0a720922-60ea-4b31-ba56-cdcbba1ab629/0a720922-60ea-4b31-ba56-cdcbba1ab629.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.041742] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e75c49d0-1ccf-4f29-aa60-a19d60551915 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.049267] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 875.049267] env[62204]: value = "task-1199942" [ 875.049267] env[62204]: _type = "Task" [ 875.049267] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.057948] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199942, 'name': Rename_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.084913] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquiring lock "refresh_cache-211ca0c1-cf05-4148-ad5c-46cbbd72278e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.085255] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquired lock "refresh_cache-211ca0c1-cf05-4148-ad5c-46cbbd72278e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.085506] env[62204]: DEBUG nova.network.neutron [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 875.282110] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 875.282446] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-96d06026-f928-4eb1-a0da-4d926982d44b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.290686] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 875.290686] env[62204]: value = "task-1199943" [ 875.290686] env[62204]: _type = "Task" [ 875.290686] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.299287] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199943, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.359889] env[62204]: INFO nova.compute.manager [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Took 47.52 seconds to build instance. [ 875.414676] env[62204]: DEBUG nova.compute.manager [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 875.417294] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.035s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.421516] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 31.535s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.443880] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 875.444245] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 875.444547] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 875.444635] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 875.444804] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 875.444962] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 875.445224] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 875.445434] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 875.446024] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 875.446024] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 875.446024] env[62204]: DEBUG nova.virt.hardware [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 875.447246] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe301721-3e96-4ae7-aaa8-bd404795c6b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.451742] env[62204]: INFO nova.scheduler.client.report [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Deleted allocations for instance 25563dec-7e4d-42d9-b922-0b2354b5d70e [ 875.464504] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcd4a90-0d13-40fc-9974-e08ea5b2b5b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.561068] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199942, 'name': Rename_Task, 'duration_secs': 0.155386} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.561481] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.561791] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b2de344-72f4-4997-ad7e-4113ba40f511 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.570546] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 875.570546] env[62204]: value = "task-1199944" [ 875.570546] env[62204]: _type = "Task" [ 875.570546] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.579487] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199944, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.639050] env[62204]: DEBUG nova.network.neutron [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 875.734225] env[62204]: DEBUG nova.compute.manager [req-d4fffd9d-bacb-4525-b5b6-3de8d0fb8ac8 req-c4d4140d-1565-48a5-9122-ca2bd287791f service nova] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Received event network-vif-plugged-830a7992-393b-4d36-82d8-b660d6904ae7 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 875.734225] env[62204]: DEBUG oslo_concurrency.lockutils [req-d4fffd9d-bacb-4525-b5b6-3de8d0fb8ac8 req-c4d4140d-1565-48a5-9122-ca2bd287791f service nova] Acquiring lock "7b7032a8-8093-43fb-b2e2-c6308d96e819-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.734225] env[62204]: DEBUG oslo_concurrency.lockutils [req-d4fffd9d-bacb-4525-b5b6-3de8d0fb8ac8 req-c4d4140d-1565-48a5-9122-ca2bd287791f service nova] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.734225] env[62204]: DEBUG oslo_concurrency.lockutils [req-d4fffd9d-bacb-4525-b5b6-3de8d0fb8ac8 req-c4d4140d-1565-48a5-9122-ca2bd287791f service nova] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.734436] env[62204]: DEBUG nova.compute.manager [req-d4fffd9d-bacb-4525-b5b6-3de8d0fb8ac8 req-c4d4140d-1565-48a5-9122-ca2bd287791f service nova] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] No waiting events found dispatching network-vif-plugged-830a7992-393b-4d36-82d8-b660d6904ae7 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 875.734624] env[62204]: WARNING nova.compute.manager [req-d4fffd9d-bacb-4525-b5b6-3de8d0fb8ac8 req-c4d4140d-1565-48a5-9122-ca2bd287791f service nova] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Received unexpected event network-vif-plugged-830a7992-393b-4d36-82d8-b660d6904ae7 for instance with vm_state building and task_state spawning. [ 875.788792] env[62204]: DEBUG nova.network.neutron [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Successfully updated port: 830a7992-393b-4d36-82d8-b660d6904ae7 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 875.804073] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199943, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.862200] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3bdd1c96-ac38-45e1-9abc-12b1f06cd350 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.821s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.870122] env[62204]: DEBUG nova.network.neutron [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Updating instance_info_cache with network_info: [{"id": "598f0c6c-7d5c-4ecf-bbde-94d1236e560c", "address": "fa:16:3e:d9:3e:85", "network": {"id": "b449a37e-791f-4290-abb3-decdc4b05a70", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1492388731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c275cfd5c1046e087bc7e1a3dc0c10c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap598f0c6c-7d", "ovs_interfaceid": "598f0c6c-7d5c-4ecf-bbde-94d1236e560c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.927583] env[62204]: INFO nova.compute.claims [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.962047] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1e9502f0-171a-4875-842b-852f214d86ea tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "25563dec-7e4d-42d9-b922-0b2354b5d70e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.317s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.081745] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199944, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.294152] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.294152] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.294152] env[62204]: DEBUG nova.network.neutron [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 876.307105] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199943, 'name': CreateSnapshot_Task, 'duration_secs': 0.62784} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.308344] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 876.309287] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53757a8e-b9e2-40c0-8e06-3853fcdd2eda {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.373610] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Releasing lock "refresh_cache-211ca0c1-cf05-4148-ad5c-46cbbd72278e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.373957] env[62204]: DEBUG nova.compute.manager [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Instance network_info: |[{"id": "598f0c6c-7d5c-4ecf-bbde-94d1236e560c", "address": "fa:16:3e:d9:3e:85", "network": {"id": "b449a37e-791f-4290-abb3-decdc4b05a70", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1492388731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"5c275cfd5c1046e087bc7e1a3dc0c10c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap598f0c6c-7d", "ovs_interfaceid": "598f0c6c-7d5c-4ecf-bbde-94d1236e560c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 876.374500] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:3e:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a316376e-2ef0-4b1e-b40c-10321ebd7e1a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '598f0c6c-7d5c-4ecf-bbde-94d1236e560c', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.382293] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Creating folder: Project (5c275cfd5c1046e087bc7e1a3dc0c10c). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.382936] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2465b2a7-8fb4-498f-9347-075ec4fdf04e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.396972] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Created folder: Project (5c275cfd5c1046e087bc7e1a3dc0c10c) in parent group-v259933. [ 876.397343] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Creating folder: Instances. Parent ref: group-v260077. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.397761] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c44efa7-867c-44f1-88a9-d4dae8b561bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.411959] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Created folder: Instances in parent group-v260077. [ 876.412252] env[62204]: DEBUG oslo.service.loopingcall [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.412467] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.412799] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8733cb6-8a97-42a9-af82-afd292af47c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.434465] env[62204]: INFO nova.compute.resource_tracker [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating resource usage from migration ac075a5b-5a18-471b-b2b2-598795397010 [ 876.438999] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.438999] env[62204]: value = "task-1199947" [ 876.438999] env[62204]: _type = "Task" [ 876.438999] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.448057] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199947, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.529636] env[62204]: DEBUG nova.compute.manager [req-bcf2f56c-3a72-42dd-a764-75b779cff49f req-4e254568-07db-4dd2-8a24-5d448be31ad8 service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Received event network-changed-598f0c6c-7d5c-4ecf-bbde-94d1236e560c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 876.529867] env[62204]: DEBUG nova.compute.manager [req-bcf2f56c-3a72-42dd-a764-75b779cff49f req-4e254568-07db-4dd2-8a24-5d448be31ad8 service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Refreshing instance network info cache due to event network-changed-598f0c6c-7d5c-4ecf-bbde-94d1236e560c. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 876.530061] env[62204]: DEBUG oslo_concurrency.lockutils [req-bcf2f56c-3a72-42dd-a764-75b779cff49f req-4e254568-07db-4dd2-8a24-5d448be31ad8 service nova] Acquiring lock "refresh_cache-211ca0c1-cf05-4148-ad5c-46cbbd72278e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 876.530216] env[62204]: DEBUG oslo_concurrency.lockutils [req-bcf2f56c-3a72-42dd-a764-75b779cff49f req-4e254568-07db-4dd2-8a24-5d448be31ad8 service nova] Acquired lock "refresh_cache-211ca0c1-cf05-4148-ad5c-46cbbd72278e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.530481] env[62204]: DEBUG nova.network.neutron [req-bcf2f56c-3a72-42dd-a764-75b779cff49f req-4e254568-07db-4dd2-8a24-5d448be31ad8 service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Refreshing network info cache for port 598f0c6c-7d5c-4ecf-bbde-94d1236e560c {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 876.585023] env[62204]: DEBUG oslo_vmware.api [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199944, 'name': PowerOnVM_Task, 'duration_secs': 0.517331} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.585321] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 876.585546] env[62204]: DEBUG nova.compute.manager [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 876.586515] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a09b6bc-5f42-4e33-9b6f-e2aabd2a1ddb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.831282] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 876.831632] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-17100d3a-6057-44fe-a449-e5b717a79d28 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.842772] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 876.842772] env[62204]: value = "task-1199948" [ 876.842772] env[62204]: _type = "Task" [ 876.842772] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.853762] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199948, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.861061] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb78116-b79a-4fcf-8370-01ea4d62edac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.870067] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f596b2-b97a-4396-a69a-e3d819e49e90 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.906817] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39a52f3-5a2c-4d38-8292-7a739b806737 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.915109] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83be8f51-e3f5-4d23-b188-6a55628c0858 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.933173] env[62204]: DEBUG nova.compute.provider_tree [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 876.949972] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199947, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.000723] env[62204]: DEBUG nova.network.neutron [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 877.109489] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.255928] env[62204]: DEBUG nova.network.neutron [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance_info_cache with network_info: [{"id": "830a7992-393b-4d36-82d8-b660d6904ae7", "address": "fa:16:3e:01:7a:45", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap830a7992-39", "ovs_interfaceid": "830a7992-393b-4d36-82d8-b660d6904ae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.339286] env[62204]: DEBUG nova.network.neutron [req-bcf2f56c-3a72-42dd-a764-75b779cff49f req-4e254568-07db-4dd2-8a24-5d448be31ad8 service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Updated VIF entry in instance network info cache for port 598f0c6c-7d5c-4ecf-bbde-94d1236e560c. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 877.339286] env[62204]: DEBUG nova.network.neutron [req-bcf2f56c-3a72-42dd-a764-75b779cff49f req-4e254568-07db-4dd2-8a24-5d448be31ad8 service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Updating instance_info_cache with network_info: [{"id": "598f0c6c-7d5c-4ecf-bbde-94d1236e560c", "address": "fa:16:3e:d9:3e:85", "network": {"id": "b449a37e-791f-4290-abb3-decdc4b05a70", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1492388731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c275cfd5c1046e087bc7e1a3dc0c10c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a316376e-2ef0-4b1e-b40c-10321ebd7e1a", "external-id": "nsx-vlan-transportzone-942", "segmentation_id": 942, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap598f0c6c-7d", "ovs_interfaceid": "598f0c6c-7d5c-4ecf-bbde-94d1236e560c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.354942] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199948, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.436932] env[62204]: DEBUG nova.scheduler.client.report [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 877.451589] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199947, 'name': CreateVM_Task, 'duration_secs': 0.557035} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.451794] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 877.452644] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.452820] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.453396] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 877.454358] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bba7afdc-392d-4e2d-9767-ebe2920499e5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.460599] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 877.460599] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b0cd0d-4d5a-ce30-3682-d3528bf52e0b" [ 877.460599] env[62204]: _type = "Task" [ 877.460599] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.470234] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b0cd0d-4d5a-ce30-3682-d3528bf52e0b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.575155] env[62204]: DEBUG oslo_concurrency.lockutils [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 877.575394] env[62204]: DEBUG oslo_concurrency.lockutils [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.759074] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.759431] env[62204]: DEBUG nova.compute.manager [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Instance network_info: |[{"id": "830a7992-393b-4d36-82d8-b660d6904ae7", "address": "fa:16:3e:01:7a:45", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap830a7992-39", "ovs_interfaceid": "830a7992-393b-4d36-82d8-b660d6904ae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 877.760186] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:7a:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '830a7992-393b-4d36-82d8-b660d6904ae7', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.767728] env[62204]: DEBUG oslo.service.loopingcall [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 877.767871] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.768116] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a110de19-ff40-4d03-83e8-afc9bd0821a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.790059] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.790059] env[62204]: value = "task-1199949" [ 877.790059] env[62204]: _type = "Task" [ 877.790059] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.799266] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199949, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.843428] env[62204]: DEBUG oslo_concurrency.lockutils [req-bcf2f56c-3a72-42dd-a764-75b779cff49f req-4e254568-07db-4dd2-8a24-5d448be31ad8 service nova] Releasing lock "refresh_cache-211ca0c1-cf05-4148-ad5c-46cbbd72278e" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.855649] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199948, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.866507] env[62204]: DEBUG nova.compute.manager [req-fa903035-6ec8-4374-b406-e78abf62aedd req-51572c66-a6fe-4bc3-ac56-085eb1940051 service nova] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Received event network-changed-830a7992-393b-4d36-82d8-b660d6904ae7 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 877.866716] env[62204]: DEBUG nova.compute.manager [req-fa903035-6ec8-4374-b406-e78abf62aedd req-51572c66-a6fe-4bc3-ac56-085eb1940051 service nova] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Refreshing instance network info cache due to event network-changed-830a7992-393b-4d36-82d8-b660d6904ae7. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 877.867049] env[62204]: DEBUG oslo_concurrency.lockutils [req-fa903035-6ec8-4374-b406-e78abf62aedd req-51572c66-a6fe-4bc3-ac56-085eb1940051 service nova] Acquiring lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.867217] env[62204]: DEBUG oslo_concurrency.lockutils [req-fa903035-6ec8-4374-b406-e78abf62aedd req-51572c66-a6fe-4bc3-ac56-085eb1940051 service nova] Acquired lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.867406] env[62204]: DEBUG nova.network.neutron [req-fa903035-6ec8-4374-b406-e78abf62aedd req-51572c66-a6fe-4bc3-ac56-085eb1940051 service nova] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Refreshing network info cache for port 830a7992-393b-4d36-82d8-b660d6904ae7 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 877.946719] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.525s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 877.946940] env[62204]: INFO nova.compute.manager [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Migrating [ 877.947357] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.947542] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.948904] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.196s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 877.950397] env[62204]: INFO nova.compute.claims [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 877.974396] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b0cd0d-4d5a-ce30-3682-d3528bf52e0b, 'name': SearchDatastore_Task, 'duration_secs': 0.015685} 
completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.974896] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.975312] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.975715] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.976029] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.976361] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.976807] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b912e07-3bda-459b-a89e-9d3799afceeb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.988464] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.988721] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.989518] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e51ed70-ecc6-4726-a263-3da01b2179fe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.997382] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 877.997382] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52716757-7390-5fed-353c-4dc239ebfde8" [ 877.997382] env[62204]: _type = "Task" [ 877.997382] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.008293] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52716757-7390-5fed-353c-4dc239ebfde8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.079598] env[62204]: DEBUG nova.compute.utils [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 878.217359] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "0a720922-60ea-4b31-ba56-cdcbba1ab629" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.217745] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "0a720922-60ea-4b31-ba56-cdcbba1ab629" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.218012] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "0a720922-60ea-4b31-ba56-cdcbba1ab629-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.218251] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "0a720922-60ea-4b31-ba56-cdcbba1ab629-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.218487] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc 
tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "0a720922-60ea-4b31-ba56-cdcbba1ab629-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.221966] env[62204]: INFO nova.compute.manager [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Terminating instance [ 878.224403] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "refresh_cache-0a720922-60ea-4b31-ba56-cdcbba1ab629" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.224403] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquired lock "refresh_cache-0a720922-60ea-4b31-ba56-cdcbba1ab629" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.224636] env[62204]: DEBUG nova.network.neutron [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 878.300981] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199949, 'name': CreateVM_Task, 'duration_secs': 0.455158} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.301899] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.302071] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.302781] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.302781] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 878.302966] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eef4ebd8-10b6-4574-a22e-c6a6f730d948 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.309061] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 878.309061] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52852820-d3c5-efc9-32df-771da643a29a" [ 878.309061] env[62204]: _type = "Task" [ 878.309061] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.316438] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52852820-d3c5-efc9-32df-771da643a29a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.355385] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199948, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.454076] env[62204]: INFO nova.compute.rpcapi [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 878.454501] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.516569] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52716757-7390-5fed-353c-4dc239ebfde8, 'name': SearchDatastore_Task, 'duration_secs': 0.011486} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.517562] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8be15569-6fba-4a84-91a4-1029accc349f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.527209] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.530223] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.530509] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.530762] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.530977] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock 
"51c9e353-f2cf-41b4-b37e-1cfd5dca0518-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.532741] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 878.532741] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a11c84-8dac-bd91-4387-258b5427e28a" [ 878.532741] env[62204]: _type = "Task" [ 878.532741] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.536077] env[62204]: INFO nova.compute.manager [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Terminating instance [ 878.541032] env[62204]: DEBUG nova.compute.manager [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 878.541204] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.542224] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c12209-f0f3-4675-a25f-28ab4da842a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.552252] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a11c84-8dac-bd91-4387-258b5427e28a, 'name': SearchDatastore_Task, 'duration_secs': 0.01177} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.554557] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.554835] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 211ca0c1-cf05-4148-ad5c-46cbbd72278e/211ca0c1-cf05-4148-ad5c-46cbbd72278e.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.555148] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.555375] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b0f9f03-7496-430d-aff0-e5edf93c5a8d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.557302] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc128d8d-3d3c-4510-b80c-bb980806bdd0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.575019] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 878.575019] env[62204]: value = "task-1199950" [ 878.575019] env[62204]: _type = "Task" [ 878.575019] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.575334] env[62204]: DEBUG oslo_vmware.api [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 878.575334] env[62204]: value = "task-1199951" [ 878.575334] env[62204]: _type = "Task" [ 878.575334] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.582368] env[62204]: DEBUG oslo_concurrency.lockutils [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.593521] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199950, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.593798] env[62204]: DEBUG oslo_vmware.api [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199951, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.759100] env[62204]: DEBUG nova.network.neutron [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 878.809686] env[62204]: DEBUG nova.network.neutron [req-fa903035-6ec8-4374-b406-e78abf62aedd req-51572c66-a6fe-4bc3-ac56-085eb1940051 service nova] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updated VIF entry in instance network info cache for port 830a7992-393b-4d36-82d8-b660d6904ae7. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 878.810094] env[62204]: DEBUG nova.network.neutron [req-fa903035-6ec8-4374-b406-e78abf62aedd req-51572c66-a6fe-4bc3-ac56-085eb1940051 service nova] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance_info_cache with network_info: [{"id": "830a7992-393b-4d36-82d8-b660d6904ae7", "address": "fa:16:3e:01:7a:45", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap830a7992-39", "ovs_interfaceid": "830a7992-393b-4d36-82d8-b660d6904ae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.827083] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52852820-d3c5-efc9-32df-771da643a29a, 'name': SearchDatastore_Task, 'duration_secs': 0.022365} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.827426] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.827678] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.827916] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.828089] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.828280] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.828570] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6375d9f-4608-46e8-978c-a60fabaf2c35 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.845224] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.845449] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.846353] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa3235b5-fc0b-4eab-acff-0beada41a54d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.860781] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 878.860781] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ed5dbe-2359-f664-f7a6-e8e38746bbe8" [ 878.860781] env[62204]: _type = "Task" [ 878.860781] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.866064] env[62204]: DEBUG nova.network.neutron [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.869196] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199948, 'name': CloneVM_Task, 'duration_secs': 1.571026} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.873475] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Created linked-clone VM from snapshot [ 878.874927] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bb0f49-57ac-4374-943f-c35d199b626a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.893291] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ed5dbe-2359-f664-f7a6-e8e38746bbe8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.893711] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Uploading image 7c0de877-a093-4216-b197-5c01c492bbe6 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 878.924826] env[62204]: DEBUG oslo_vmware.rw_handles [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 878.924826] env[62204]: value = "vm-260080" [ 878.924826] env[62204]: _type = "VirtualMachine" [ 878.924826] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 878.926319] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-84dd6726-6805-4ccc-9cac-4cf8b0490bba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.936420] env[62204]: DEBUG oslo_vmware.rw_handles [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lease: (returnval){ [ 878.936420] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520d509a-a87d-6aff-1d94-10529413fa04" [ 878.936420] env[62204]: _type = "HttpNfcLease" [ 878.936420] env[62204]: } obtained for exporting VM: (result){ [ 878.936420] env[62204]: value = "vm-260080" [ 878.936420] env[62204]: _type = "VirtualMachine" [ 878.936420] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 878.936777] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the lease: (returnval){ [ 878.936777] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520d509a-a87d-6aff-1d94-10529413fa04" [ 878.936777] env[62204]: _type = "HttpNfcLease" [ 878.936777] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 878.949309] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 878.949309] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520d509a-a87d-6aff-1d94-10529413fa04" [ 878.949309] env[62204]: _type = "HttpNfcLease" [ 878.949309] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 878.965904] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e609c4c-043d-4c25-8776-58278c535028 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.978034] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 878.978241] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.978509] env[62204]: DEBUG nova.network.neutron [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 878.981136] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c77a3e3-e85f-4322-9887-c8d52b9e7396 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.016919] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086db2e1-13a3-425b-b802-2981d5d2c5b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.027440] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67151c0-ee49-435f-bb00-b0f2eed7b68e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.052313] env[62204]: DEBUG nova.compute.provider_tree [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.092768] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199950, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.097370] env[62204]: DEBUG oslo_vmware.api [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199951, 'name': PowerOffVM_Task, 'duration_secs': 0.207166} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.097767] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.098069] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.098465] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7063439-9380-4a97-b114-62325c9deeb0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.190833] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.191291] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.191543] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Deleting the datastore file [datastore2] 51c9e353-f2cf-41b4-b37e-1cfd5dca0518 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.191822] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-755d64ed-4c8c-4171-9da1-3e4236045846 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.200039] env[62204]: DEBUG oslo_vmware.api [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for the task: (returnval){ [ 879.200039] env[62204]: value = "task-1199954" [ 879.200039] env[62204]: _type = "Task" [ 879.200039] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.210591] env[62204]: DEBUG oslo_vmware.api [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199954, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.319773] env[62204]: DEBUG oslo_concurrency.lockutils [req-fa903035-6ec8-4374-b406-e78abf62aedd req-51572c66-a6fe-4bc3-ac56-085eb1940051 service nova] Releasing lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.369101] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Releasing lock "refresh_cache-0a720922-60ea-4b31-ba56-cdcbba1ab629" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.369260] env[62204]: DEBUG nova.compute.manager [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 879.369455] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.373826] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c331377-a4f9-4859-b18d-bbbd123f94f1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.382792] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ed5dbe-2359-f664-f7a6-e8e38746bbe8, 'name': SearchDatastore_Task, 'duration_secs': 0.060819} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.385494] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.385733] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50d90342-5775-4465-a5a4-bbee4ad2f9ee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.387933] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31a97b7e-2a95-4bd2-96f8-24f4b3a03d0f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.392515] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 879.392515] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa8ddc-2beb-9fc0-9edb-06ef6e50042b" [ 879.392515] env[62204]: _type = "Task" [ 879.392515] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.396987] env[62204]: DEBUG oslo_vmware.api [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 879.396987] env[62204]: value = "task-1199955" [ 879.396987] env[62204]: _type = "Task" [ 879.396987] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.403524] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa8ddc-2beb-9fc0-9edb-06ef6e50042b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.409434] env[62204]: DEBUG oslo_vmware.api [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199955, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.451029] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 879.451029] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520d509a-a87d-6aff-1d94-10529413fa04" [ 879.451029] env[62204]: _type = "HttpNfcLease" [ 879.451029] env[62204]: } is ready. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 879.451923] env[62204]: DEBUG oslo_vmware.rw_handles [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 879.451923] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520d509a-a87d-6aff-1d94-10529413fa04" [ 879.451923] env[62204]: _type = "HttpNfcLease" [ 879.451923] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 879.452133] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabf82c0-45e7-4e13-ad58-f790dec12fa9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.459957] env[62204]: DEBUG oslo_vmware.rw_handles [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52091a22-c7e0-f428-8ccb-aaac64a3f721/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 879.460255] env[62204]: DEBUG oslo_vmware.rw_handles [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52091a22-c7e0-f428-8ccb-aaac64a3f721/disk-0.vmdk for reading. {{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 879.556014] env[62204]: DEBUG nova.scheduler.client.report [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 879.588195] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199950, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54884} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.588537] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 211ca0c1-cf05-4148-ad5c-46cbbd72278e/211ca0c1-cf05-4148-ad5c-46cbbd72278e.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 879.588766] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.589045] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1e53596-3090-4657-83fb-73b2a0fe9d0f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.598159] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 879.598159] env[62204]: value = "task-1199956" [ 879.598159] env[62204]: _type = "Task" [ 879.598159] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.610516] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199956, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.657333] env[62204]: DEBUG oslo_concurrency.lockutils [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 879.657644] env[62204]: DEBUG oslo_concurrency.lockutils [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 879.657901] env[62204]: INFO nova.compute.manager [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Attaching volume f975a826-6b8c-4488-b353-b62effa059b3 to /dev/sdb [ 879.674251] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0fdf950a-b20f-4502-b2c6-d0af9bcb9a68 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.705712] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe36ce35-5005-4b4a-a4e7-908e6c236071 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.722470] env[62204]: DEBUG oslo_vmware.api [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199954, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.724954] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c670e1a9-35be-44db-845b-66a6bc6c3bfe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.748085] env[62204]: DEBUG nova.virt.block_device [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Updating existing volume attachment record: 3fd0898a-8d84-4cd8-a9ce-e8d2526b8322 {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 879.821411] env[62204]: DEBUG nova.network.neutron [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [{"id": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "address": "fa:16:3e:b6:54:e6", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf86a68-c5", "ovs_interfaceid": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.910485] env[62204]: DEBUG oslo_vmware.api [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199955, 'name': PowerOffVM_Task, 'duration_secs': 0.373004} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.914249] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.914613] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.914958] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa8ddc-2beb-9fc0-9edb-06ef6e50042b, 'name': SearchDatastore_Task, 'duration_secs': 0.057575} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.916285] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c654bf22-7916-43e8-a693-01fdc2c52422 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.919473] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 879.919922] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 7b7032a8-8093-43fb-b2e2-c6308d96e819/7b7032a8-8093-43fb-b2e2-c6308d96e819.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 879.920417] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65854967-7a6d-47ca-ae73-b6195cabc76f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.929965] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 879.929965] env[62204]: value = "task-1199959" [ 879.929965] env[62204]: _type = "Task" [ 879.929965] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.942515] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199959, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.953939] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.954347] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.954955] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Deleting the datastore file [datastore1] 0a720922-60ea-4b31-ba56-cdcbba1ab629 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.954955] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7e0ce63-ade5-4c61-a8fa-62304a292c26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.964373] env[62204]: DEBUG oslo_vmware.api [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for the task: (returnval){ [ 879.964373] env[62204]: value = "task-1199960" [ 879.964373] env[62204]: _type = "Task" [ 879.964373] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.974539] env[62204]: DEBUG oslo_vmware.api [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199960, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.062295] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.062482] env[62204]: DEBUG nova.compute.manager [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 880.067369] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.850s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.069565] env[62204]: INFO nova.compute.claims [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.116529] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199956, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.19151} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.117408] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 880.119571] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521e993c-c073-4434-b3c5-d56c4264b1d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.149072] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 211ca0c1-cf05-4148-ad5c-46cbbd72278e/211ca0c1-cf05-4148-ad5c-46cbbd72278e.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.150040] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20462088-a2e0-45e8-ada6-5d9932e3e48e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.180076] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 880.180076] env[62204]: value = "task-1199963" [ 880.180076] env[62204]: _type = "Task" [ 880.180076] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.191124] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199963, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.212815] env[62204]: DEBUG oslo_vmware.api [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Task: {'id': task-1199954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.733132} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.212815] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.212815] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.213942] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.214801] env[62204]: INFO nova.compute.manager [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Took 1.67 seconds to destroy the instance on the hypervisor. [ 880.215097] env[62204]: DEBUG oslo.service.loopingcall [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.215316] env[62204]: DEBUG nova.compute.manager [-] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 880.215408] env[62204]: DEBUG nova.network.neutron [-] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 880.324055] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.441037] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199959, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.475214] env[62204]: DEBUG oslo_vmware.api [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Task: {'id': task-1199960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.258544} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.475539] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.477808] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.477808] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.477808] env[62204]: INFO nova.compute.manager [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Took 1.11 seconds to destroy the instance on the hypervisor. [ 880.477808] env[62204]: DEBUG oslo.service.loopingcall [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.477808] env[62204]: DEBUG nova.compute.manager [-] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 880.477808] env[62204]: DEBUG nova.network.neutron [-] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 880.496954] env[62204]: DEBUG nova.network.neutron [-] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 880.568887] env[62204]: DEBUG nova.compute.utils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 880.575972] env[62204]: DEBUG nova.compute.manager [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 880.576467] env[62204]: DEBUG nova.network.neutron [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 880.634231] env[62204]: DEBUG nova.policy [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '370d4b8a24b84bf0a626d056c7758863', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb9a24ef26c74781a2ad36e3430ce630', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 880.687791] env[62204]: DEBUG nova.compute.manager [req-60a79f41-5b56-48fd-af0a-b4aef176b22c req-e4be9a0f-06b6-45a2-b948-d61cf0285fa4 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Received event network-vif-deleted-eaf8a6d2-9ed4-4008-9072-23d2ac93fc16 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 880.688188] env[62204]: INFO nova.compute.manager [req-60a79f41-5b56-48fd-af0a-b4aef176b22c req-e4be9a0f-06b6-45a2-b948-d61cf0285fa4 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Neutron deleted interface eaf8a6d2-9ed4-4008-9072-23d2ac93fc16; detaching it from the instance and deleting it from the info cache [ 880.688577] env[62204]: DEBUG nova.network.neutron [req-60a79f41-5b56-48fd-af0a-b4aef176b22c req-e4be9a0f-06b6-45a2-b948-d61cf0285fa4 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.697489] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.916501] env[62204]: DEBUG nova.network.neutron [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Successfully created port: 337d50b5-86ec-4cc3-92f3-86bd3fe37b54 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.944363] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199959, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.000525] env[62204]: DEBUG nova.network.neutron [-] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.077048] env[62204]: DEBUG nova.compute.manager [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 881.091558] env[62204]: DEBUG nova.network.neutron [-] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.194255] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec24067f-21bd-4e67-a96f-f74c37e0a915 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.202909] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.216499] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28713aef-737c-4302-a604-878e8c0113c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.272953] env[62204]: DEBUG nova.compute.manager [req-60a79f41-5b56-48fd-af0a-b4aef176b22c req-e4be9a0f-06b6-45a2-b948-d61cf0285fa4 service nova] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Detach interface failed, port_id=eaf8a6d2-9ed4-4008-9072-23d2ac93fc16, reason: Instance 51c9e353-f2cf-41b4-b37e-1cfd5dca0518 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 881.446452] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199959, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.302597} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.446647] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 7b7032a8-8093-43fb-b2e2-c6308d96e819/7b7032a8-8093-43fb-b2e2-c6308d96e819.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 881.446983] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 881.447606] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a5545cc-7c23-4243-951a-a42d65259e35 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.459902] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 881.459902] env[62204]: value = "task-1199964" [ 881.459902] env[62204]: _type = "Task" [ 881.459902] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.471845] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199964, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.503467] env[62204]: INFO nova.compute.manager [-] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Took 1.03 seconds to deallocate network for instance. [ 881.556102] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2e6c60-182f-4fc1-8f4d-c9708d4f82f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.565135] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0881e5-88bd-4c02-aff3-81e5a27f99f6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.601115] env[62204]: INFO nova.compute.manager [-] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Took 1.39 seconds to deallocate network for instance. 
[ 881.604031] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87613061-6cc7-43bd-8950-7b73c51467ae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.617165] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2da34f-a9ae-4ec2-8018-8f579dc8c0e4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.634929] env[62204]: DEBUG nova.compute.provider_tree [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.694833] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199963, 'name': ReconfigVM_Task, 'duration_secs': 1.26134} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.694833] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 211ca0c1-cf05-4148-ad5c-46cbbd72278e/211ca0c1-cf05-4148-ad5c-46cbbd72278e.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.694833] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9678216-d9b2-4ccf-a79e-e309608c2ed2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.702168] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 881.702168] env[62204]: value = "task-1199965" [ 881.702168] env[62204]: _type = "Task" [ 881.702168] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.711976] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199965, 'name': Rename_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.841337] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a67ebc-ad1c-4e46-87a1-f2fd22140c72 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.861229] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance '0a4a432d-a71a-4da7-be90-25dcec5a64c6' progress to 0 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 881.973236] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071619} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.973561] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 881.974393] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91907c66-9e08-4212-8a51-3dc2ce83c618 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.998068] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 7b7032a8-8093-43fb-b2e2-c6308d96e819/7b7032a8-8093-43fb-b2e2-c6308d96e819.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 881.998424] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ceb2a41-46b9-44c1-8a1a-35f6db0801a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.017271] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.019709] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 882.019709] env[62204]: value = "task-1199966" [ 882.019709] env[62204]: _type = "Task" [ 882.019709] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.029068] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199966, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.104559] env[62204]: DEBUG nova.compute.manager [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 882.112354] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.129176] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.129479] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.129672] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.129878] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.130083] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.130273] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e 
tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.130560] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.130774] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.131055] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.131173] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.131398] env[62204]: DEBUG nova.virt.hardware [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.132327] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0129ad69-a2ca-4608-9b2d-79a939ce4865 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.141487] env[62204]: DEBUG nova.scheduler.client.report [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 882.146337] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef1aa29-ffee-4425-978a-4747f9d8acc9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.213278] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199965, 'name': Rename_Task, 'duration_secs': 0.279694} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.213761] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.214156] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2ba6989-6066-4e0d-afd0-5e6399083d21 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.222458] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 882.222458] env[62204]: value = "task-1199968" [ 882.222458] env[62204]: _type = "Task" [ 882.222458] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.233711] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199968, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.370425] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 882.370752] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb6bab5d-f454-4859-8c60-be392784fecc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.380584] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 882.380584] env[62204]: value = "task-1199969" [ 882.380584] env[62204]: _type = "Task" [ 882.380584] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.392890] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 882.394098] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance '0a4a432d-a71a-4da7-be90-25dcec5a64c6' progress to 17 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 882.404935] env[62204]: DEBUG nova.compute.manager [req-aef8c661-14d3-4089-9471-f75028b73be9 req-7ecd765e-f3eb-47b8-b4b3-4ece0bda3740 service nova] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Received event network-vif-plugged-337d50b5-86ec-4cc3-92f3-86bd3fe37b54 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 882.405201] env[62204]: DEBUG oslo_concurrency.lockutils [req-aef8c661-14d3-4089-9471-f75028b73be9 req-7ecd765e-f3eb-47b8-b4b3-4ece0bda3740 service nova] Acquiring lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.405487] env[62204]: DEBUG oslo_concurrency.lockutils [req-aef8c661-14d3-4089-9471-f75028b73be9 req-7ecd765e-f3eb-47b8-b4b3-4ece0bda3740 service nova] Lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.405737] env[62204]: DEBUG oslo_concurrency.lockutils [req-aef8c661-14d3-4089-9471-f75028b73be9 req-7ecd765e-f3eb-47b8-b4b3-4ece0bda3740 service nova] Lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.405970] env[62204]: DEBUG nova.compute.manager [req-aef8c661-14d3-4089-9471-f75028b73be9 req-7ecd765e-f3eb-47b8-b4b3-4ece0bda3740 service nova] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] No waiting events found dispatching network-vif-plugged-337d50b5-86ec-4cc3-92f3-86bd3fe37b54 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 882.406691] env[62204]: WARNING nova.compute.manager [req-aef8c661-14d3-4089-9471-f75028b73be9 req-7ecd765e-f3eb-47b8-b4b3-4ece0bda3740 service nova] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Received unexpected event network-vif-plugged-337d50b5-86ec-4cc3-92f3-86bd3fe37b54 for instance with vm_state building and task_state spawning. 
[ 882.518281] env[62204]: DEBUG nova.network.neutron [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Successfully updated port: 337d50b5-86ec-4cc3-92f3-86bd3fe37b54 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.532179] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199966, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.654126] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.654773] env[62204]: DEBUG nova.compute.manager [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 882.659262] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.675s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.659262] env[62204]: DEBUG nova.objects.instance [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lazy-loading 'resources' on Instance uuid 1a1cb81f-383e-48de-8c11-3d5e2c801f40 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 882.735371] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199968, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.901552] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.901886] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.902183] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.902395] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.902607] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.902792] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.903019] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.903244] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.903455] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 
tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.903652] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.903906] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.909912] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a632527d-e66e-441e-be1a-3c50ed61df8e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.933731] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 882.933731] env[62204]: value = "task-1199970" [ 882.933731] env[62204]: _type = "Task" [ 882.933731] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.943859] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199970, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.021471] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "refresh_cache-bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.021471] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "refresh_cache-bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.021699] env[62204]: DEBUG nova.network.neutron [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.033584] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199966, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.165353] env[62204]: DEBUG nova.compute.utils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 883.168835] env[62204]: DEBUG nova.compute.manager [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 883.169066] env[62204]: DEBUG nova.network.neutron [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 883.218666] env[62204]: DEBUG nova.policy [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7a06a775394647db8910e3b045d1644a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce3664afab5c4ac28a6dbf1ba0fec2fe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 883.235132] env[62204]: DEBUG oslo_vmware.api [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199968, 'name': PowerOnVM_Task, 'duration_secs': 0.665164} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.235811] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.235811] env[62204]: INFO nova.compute.manager [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Took 9.32 seconds to spawn the instance on the hypervisor. 
[ 883.235907] env[62204]: DEBUG nova.compute.manager [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 883.236753] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da8f929-c1f4-4e4b-8e60-96456d3944d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.447999] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199970, 'name': ReconfigVM_Task, 'duration_secs': 0.242161} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.448362] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance '0a4a432d-a71a-4da7-be90-25dcec5a64c6' progress to 33 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 883.537796] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199966, 'name': ReconfigVM_Task, 'duration_secs': 1.342181} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.540764] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 7b7032a8-8093-43fb-b2e2-c6308d96e819/7b7032a8-8093-43fb-b2e2-c6308d96e819.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 883.542082] env[62204]: DEBUG nova.network.neutron [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Successfully created port: 454e3072-3434-44df-b410-3e41abc2baca {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 883.544445] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05b65b05-b0f9-47ea-a3fc-4d1be643669b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.554210] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 883.554210] env[62204]: value = "task-1199971" [ 883.554210] env[62204]: _type = "Task" [ 883.554210] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.570810] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199971, 'name': Rename_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.600378] env[62204]: DEBUG nova.network.neutron [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.614082] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4418361e-2d9c-45ba-bef3-d062486d9062 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.629969] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91fee3d-df9e-4843-a2d6-cfe5ba6d26f0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.669531] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3138c560-5533-443a-8fde-ed02df33e9cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.672520] env[62204]: DEBUG nova.compute.manager [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 883.682485] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd279533-8e88-43d9-9d73-017d218a7a1d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.697620] env[62204]: DEBUG nova.compute.provider_tree [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.762909] env[62204]: INFO nova.compute.manager [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Took 52.19 seconds to build instance. 
[ 883.869841] env[62204]: DEBUG nova.network.neutron [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Updating instance_info_cache with network_info: [{"id": "337d50b5-86ec-4cc3-92f3-86bd3fe37b54", "address": "fa:16:3e:4f:1f:6d", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap337d50b5-86", "ovs_interfaceid": "337d50b5-86ec-4cc3-92f3-86bd3fe37b54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.957608] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 883.957608] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 883.957608] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.957608] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 883.957608] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 
tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.957608] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 883.957865] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 883.957991] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 883.958250] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 883.958439] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 883.958699] env[62204]: DEBUG nova.virt.hardware [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 883.964209] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Reconfiguring VM instance instance-0000002a to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 883.964553] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b099504b-b1ff-4d6a-b76a-2ea142660ec0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.986092] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 883.986092] env[62204]: value = "task-1199972" [ 883.986092] env[62204]: _type = "Task" [ 883.986092] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.995881] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199972, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.066422] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199971, 'name': Rename_Task, 'duration_secs': 0.173469} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.066740] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 884.067033] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad4726d5-39c4-4e0a-9a96-2efbe8949905 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.075620] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 884.075620] env[62204]: value = "task-1199973" [ 884.075620] env[62204]: _type = "Task" [ 884.075620] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.085359] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199973, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.203133] env[62204]: DEBUG nova.scheduler.client.report [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 884.262225] env[62204]: DEBUG oslo_concurrency.lockutils [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquiring lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.266299] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7cf0ef16-3832-46ea-9a68-79b53c6dff57 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.057s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.266735] env[62204]: DEBUG oslo_concurrency.lockutils [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.005s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.267620] env[62204]: DEBUG oslo_concurrency.lockutils [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquiring lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.267620] env[62204]: DEBUG oslo_concurrency.lockutils [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.267806] env[62204]: DEBUG oslo_concurrency.lockutils [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.271049] env[62204]: INFO nova.compute.manager [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Terminating instance [ 884.274471] env[62204]: DEBUG nova.compute.manager [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 884.274471] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.275303] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdf9ef6-43e4-489e-8613-fd69f2e1cc71 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.288724] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.289196] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c90ae7c-a8c5-41cc-8ffe-cf5be27838c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.299039] env[62204]: DEBUG oslo_vmware.api [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 884.299039] env[62204]: value = "task-1199974" [ 884.299039] env[62204]: _type = "Task" [ 884.299039] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.309183] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Volume attach. 
Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 884.309369] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260083', 'volume_id': 'f975a826-6b8c-4488-b353-b62effa059b3', 'name': 'volume-f975a826-6b8c-4488-b353-b62effa059b3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '62605b48-e640-4b4d-ab77-1ed44a75daa3', 'attached_at': '', 'detached_at': '', 'volume_id': 'f975a826-6b8c-4488-b353-b62effa059b3', 'serial': 'f975a826-6b8c-4488-b353-b62effa059b3'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 884.310231] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65bce8c0-48eb-444d-adc7-217ba5992b75 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.319028] env[62204]: DEBUG oslo_vmware.api [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199974, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.332028] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372d34eb-fc02-4c5b-9958-3cfc86607529 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.359568] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] volume-f975a826-6b8c-4488-b353-b62effa059b3/volume-f975a826-6b8c-4488-b353-b62effa059b3.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.360349] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26e7020e-30f2-4ce1-895a-12a99ee0368c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.374054] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "refresh_cache-bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.374425] env[62204]: DEBUG nova.compute.manager [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Instance network_info: |[{"id": "337d50b5-86ec-4cc3-92f3-86bd3fe37b54", "address": "fa:16:3e:4f:1f:6d", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap337d50b5-86", "ovs_interfaceid": "337d50b5-86ec-4cc3-92f3-86bd3fe37b54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 884.374983] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:1f:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '337d50b5-86ec-4cc3-92f3-86bd3fe37b54', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.383020] env[62204]: DEBUG oslo.service.loopingcall [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.383830] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.384156] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0141cd2f-92a4-417b-b1f4-82147f534b89 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.402708] env[62204]: DEBUG oslo_vmware.api [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 884.402708] env[62204]: value = "task-1199975" [ 884.402708] env[62204]: _type = "Task" [ 884.402708] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.410251] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.410251] env[62204]: value = "task-1199976" [ 884.410251] env[62204]: _type = "Task" [ 884.410251] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.413507] env[62204]: DEBUG oslo_vmware.api [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199975, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.422938] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199976, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.436484] env[62204]: DEBUG nova.compute.manager [req-434024d5-4a0b-4d17-ab5f-e74fadb1f3aa req-523bb300-4f25-4f54-a89c-df70b54cf125 service nova] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Received event network-changed-337d50b5-86ec-4cc3-92f3-86bd3fe37b54 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 884.436683] env[62204]: DEBUG nova.compute.manager [req-434024d5-4a0b-4d17-ab5f-e74fadb1f3aa req-523bb300-4f25-4f54-a89c-df70b54cf125 service nova] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Refreshing instance network info cache due to event network-changed-337d50b5-86ec-4cc3-92f3-86bd3fe37b54. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 884.436907] env[62204]: DEBUG oslo_concurrency.lockutils [req-434024d5-4a0b-4d17-ab5f-e74fadb1f3aa req-523bb300-4f25-4f54-a89c-df70b54cf125 service nova] Acquiring lock "refresh_cache-bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.437115] env[62204]: DEBUG oslo_concurrency.lockutils [req-434024d5-4a0b-4d17-ab5f-e74fadb1f3aa req-523bb300-4f25-4f54-a89c-df70b54cf125 service nova] Acquired lock "refresh_cache-bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.437290] env[62204]: DEBUG nova.network.neutron [req-434024d5-4a0b-4d17-ab5f-e74fadb1f3aa req-523bb300-4f25-4f54-a89c-df70b54cf125 service nova] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Refreshing network info cache for port 337d50b5-86ec-4cc3-92f3-86bd3fe37b54 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 884.498841] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199972, 'name': ReconfigVM_Task, 'duration_secs': 0.283287} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.499185] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Reconfigured VM instance instance-0000002a to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 884.500166] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b417f70f-fa62-47a9-83a7-ae11d65a760e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.529087] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 0a4a432d-a71a-4da7-be90-25dcec5a64c6/0a4a432d-a71a-4da7-be90-25dcec5a64c6.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.529507] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b188cc4-ba2f-45a4-aa9a-d6732ec9f07e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.551390] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 884.551390] env[62204]: value = "task-1199977" [ 884.551390] env[62204]: _type = "Task" [ 884.551390] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.567153] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199977, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.586544] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199973, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.686038] env[62204]: DEBUG nova.compute.manager [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 884.708471] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.050s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.713377] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 884.713833] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 884.714179] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 884.714456] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 884.714695] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 884.714889] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 884.715212] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 884.715463] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 884.715695] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 884.716134] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 884.716250] env[62204]: DEBUG nova.virt.hardware [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 884.716661] env[62204]: DEBUG oslo_concurrency.lockutils [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.318s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.716976] env[62204]: DEBUG nova.objects.instance [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lazy-loading 'resources' on Instance uuid a2a37a1b-3ef0-4be7-924c-66c7a1583b68 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 884.719772] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf67314-391c-4ae0-94da-015d6152ffc5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.732498] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d5c69c-45ca-4726-a108-bf3c4ebfe187 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.738256] env[62204]: INFO nova.scheduler.client.report [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Deleted allocations for instance 1a1cb81f-383e-48de-8c11-3d5e2c801f40 [ 884.811970] env[62204]: DEBUG oslo_vmware.api [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199974, 'name': PowerOffVM_Task, 'duration_secs': 0.240309} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.812386] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 884.812634] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 884.813396] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3f74d4b-9329-4c64-83a4-87812def7192 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.909290] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.909500] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.909694] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Deleting the datastore file [datastore2] 211ca0c1-cf05-4148-ad5c-46cbbd72278e {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.910432] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03d0a8b0-6676-4eee-9f2d-d550dd13a40a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.916219] env[62204]: DEBUG oslo_vmware.api [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199975, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.921400] env[62204]: DEBUG oslo_vmware.api [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for the task: (returnval){ [ 884.921400] env[62204]: value = "task-1199979" [ 884.921400] env[62204]: _type = "Task" [ 884.921400] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.928106] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199976, 'name': CreateVM_Task, 'duration_secs': 0.482416} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.928712] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.929919] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.930165] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.930820] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 884.931157] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7863654-0a24-4496-8de3-04a1f4f7dea9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.936766] env[62204]: DEBUG oslo_vmware.api [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199979, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.943249] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 884.943249] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d99640-e79c-83bf-d0a0-b56f06038565" [ 884.943249] env[62204]: _type = "Task" [ 884.943249] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.952695] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d99640-e79c-83bf-d0a0-b56f06038565, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.062932] env[62204]: DEBUG oslo_vmware.api [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1199977, 'name': ReconfigVM_Task, 'duration_secs': 0.405683} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.063303] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 0a4a432d-a71a-4da7-be90-25dcec5a64c6/0a4a432d-a71a-4da7-be90-25dcec5a64c6.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.063621] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance '0a4a432d-a71a-4da7-be90-25dcec5a64c6' progress to 50 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 885.093232] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199973, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.151462] env[62204]: DEBUG nova.network.neutron [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Successfully updated port: 454e3072-3434-44df-b410-3e41abc2baca {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 885.249714] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cb166e8b-e0cf-48c3-b94b-3fc72bb805ff tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1a1cb81f-383e-48de-8c11-3d5e2c801f40" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.337s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.343334] env[62204]: DEBUG nova.network.neutron [req-434024d5-4a0b-4d17-ab5f-e74fadb1f3aa req-523bb300-4f25-4f54-a89c-df70b54cf125 service nova] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Updated VIF entry in instance network info cache for port 337d50b5-86ec-4cc3-92f3-86bd3fe37b54. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 885.343927] env[62204]: DEBUG nova.network.neutron [req-434024d5-4a0b-4d17-ab5f-e74fadb1f3aa req-523bb300-4f25-4f54-a89c-df70b54cf125 service nova] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Updating instance_info_cache with network_info: [{"id": "337d50b5-86ec-4cc3-92f3-86bd3fe37b54", "address": "fa:16:3e:4f:1f:6d", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap337d50b5-86", "ovs_interfaceid": "337d50b5-86ec-4cc3-92f3-86bd3fe37b54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.415415] env[62204]: DEBUG oslo_vmware.api [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199975, 'name': ReconfigVM_Task, 'duration_secs': 0.536272} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.420839] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Reconfigured VM instance instance-0000004f to attach disk [datastore2] volume-f975a826-6b8c-4488-b353-b62effa059b3/volume-f975a826-6b8c-4488-b353-b62effa059b3.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.427684] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5fcf481-0e9c-4ec6-9012-97049459d5c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.453732] env[62204]: DEBUG oslo_vmware.api [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Task: {'id': task-1199979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.247482} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.458419] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.458677] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.458864] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.459061] env[62204]: INFO nova.compute.manager [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Took 1.19 seconds to destroy the instance on the hypervisor. [ 885.459385] env[62204]: DEBUG oslo.service.loopingcall [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 885.459695] env[62204]: DEBUG oslo_vmware.api [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 885.459695] env[62204]: value = "task-1199980" [ 885.459695] env[62204]: _type = "Task" [ 885.459695] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.459906] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d99640-e79c-83bf-d0a0-b56f06038565, 'name': SearchDatastore_Task, 'duration_secs': 0.015876} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.463022] env[62204]: DEBUG nova.compute.manager [-] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 885.463153] env[62204]: DEBUG nova.network.neutron [-] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 885.464902] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.465180] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.465467] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.465634] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.465850] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.469907] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-204c0eb1-9c5f-43a8-84d2-398a8645a74f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.479368] env[62204]: DEBUG oslo_vmware.api [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.483032] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.483223] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.485238] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecba4fcb-16ae-4668-92b9-ff8b4824e33a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.490223] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 885.490223] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52059970-8671-11fc-f153-1d1809ec9d91" [ 885.490223] env[62204]: _type = "Task" [ 885.490223] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.500397] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52059970-8671-11fc-f153-1d1809ec9d91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.570582] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acb0174-aad4-4cb4-b021-b2b7a0949864 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.596299] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7687871-3068-41c6-a535-1a8139138759 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.605710] env[62204]: DEBUG oslo_vmware.api [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1199973, 'name': PowerOnVM_Task, 'duration_secs': 1.237912} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.620034] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 885.620331] env[62204]: INFO nova.compute.manager [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Took 10.21 seconds to spawn the instance on the hypervisor. [ 885.620572] env[62204]: DEBUG nova.compute.manager [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 885.620941] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance '0a4a432d-a71a-4da7-be90-25dcec5a64c6' progress to 67 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 885.627773] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b04ea36-6484-4988-9f40-5d3377355869 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.657244] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquiring lock "refresh_cache-f445a8ea-ff21-44e9-8389-231a03c51650" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.657432] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquired lock "refresh_cache-f445a8ea-ff21-44e9-8389-231a03c51650" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.657593] env[62204]: DEBUG nova.network.neutron [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 885.726919] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a328dc-58e3-4638-b404-99df7800f75d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.736208] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4670cdd-4058-4cc2-80a2-5b3e5d51a68a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.770598] env[62204]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b562e7ed-f540-42ff-9395-837487a2db41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.780253] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d195ea31-0c76-4cba-9e20-a0178591fd06 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.795941] env[62204]: DEBUG nova.compute.provider_tree [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.847157] env[62204]: DEBUG oslo_concurrency.lockutils [req-434024d5-4a0b-4d17-ab5f-e74fadb1f3aa req-523bb300-4f25-4f54-a89c-df70b54cf125 service nova] Releasing lock "refresh_cache-bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.937576] env[62204]: DEBUG nova.compute.manager [req-65085503-16a6-440a-a41a-5e919d669299 req-7ae64597-c635-4678-b450-dac9ac3ebf4d service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Received event network-vif-deleted-598f0c6c-7d5c-4ecf-bbde-94d1236e560c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 885.937841] env[62204]: INFO nova.compute.manager [req-65085503-16a6-440a-a41a-5e919d669299 req-7ae64597-c635-4678-b450-dac9ac3ebf4d service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Neutron deleted interface 598f0c6c-7d5c-4ecf-bbde-94d1236e560c; detaching it from the instance and deleting it from the info cache [ 885.938060] env[62204]: DEBUG nova.network.neutron [req-65085503-16a6-440a-a41a-5e919d669299 req-7ae64597-c635-4678-b450-dac9ac3ebf4d service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.975808] env[62204]: DEBUG oslo_vmware.api [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199980, 'name': ReconfigVM_Task, 'duration_secs': 0.154497} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.976231] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260083', 'volume_id': 'f975a826-6b8c-4488-b353-b62effa059b3', 'name': 'volume-f975a826-6b8c-4488-b353-b62effa059b3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '62605b48-e640-4b4d-ab77-1ed44a75daa3', 'attached_at': '', 'detached_at': '', 'volume_id': 'f975a826-6b8c-4488-b353-b62effa059b3', 'serial': 'f975a826-6b8c-4488-b353-b62effa059b3'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 886.006188] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52059970-8671-11fc-f153-1d1809ec9d91, 'name': SearchDatastore_Task, 'duration_secs': 0.011487} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.007149] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f08b30c2-80e5-428f-97bc-7b7b7c7d0561 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.014959] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 886.014959] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eda2b8-1311-2336-c1be-68d83495a155" [ 886.014959] env[62204]: _type = "Task" [ 886.014959] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.024435] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eda2b8-1311-2336-c1be-68d83495a155, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.146264] env[62204]: INFO nova.compute.manager [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Took 44.76 seconds to build instance. 
[ 886.173547] env[62204]: DEBUG nova.network.neutron [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Port ccf86a68-c525-4b8b-940f-b0a08f2d3831 binding to destination host cpu-1 is already ACTIVE {{(pid=62204) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 886.193829] env[62204]: DEBUG nova.network.neutron [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 886.299042] env[62204]: DEBUG nova.scheduler.client.report [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 886.342867] env[62204]: DEBUG nova.network.neutron [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Updating instance_info_cache with network_info: [{"id": "454e3072-3434-44df-b410-3e41abc2baca", "address": "fa:16:3e:b7:a5:94", "network": {"id": "97127721-f740-4f77-ba5e-e3cf66bd2296", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-758803428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3664afab5c4ac28a6dbf1ba0fec2fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap454e3072-34", "ovs_interfaceid": "454e3072-3434-44df-b410-3e41abc2baca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.409826] env[62204]: DEBUG nova.network.neutron [-] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.444335] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a5b4a723-6b4c-413c-89e4-553ce78d3723 
{{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.454613] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1522babf-2aff-410c-8a0e-5aa474cde5c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.496193] env[62204]: DEBUG nova.compute.manager [req-65085503-16a6-440a-a41a-5e919d669299 req-7ae64597-c635-4678-b450-dac9ac3ebf4d service nova] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Detach interface failed, port_id=598f0c6c-7d5c-4ecf-bbde-94d1236e560c, reason: Instance 211ca0c1-cf05-4148-ad5c-46cbbd72278e could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 886.501225] env[62204]: DEBUG nova.compute.manager [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Received event network-vif-plugged-454e3072-3434-44df-b410-3e41abc2baca {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 886.501225] env[62204]: DEBUG oslo_concurrency.lockutils [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] Acquiring lock "f445a8ea-ff21-44e9-8389-231a03c51650-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.501225] env[62204]: DEBUG oslo_concurrency.lockutils [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] Lock "f445a8ea-ff21-44e9-8389-231a03c51650-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.501225] env[62204]: DEBUG oslo_concurrency.lockutils [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] Lock "f445a8ea-ff21-44e9-8389-231a03c51650-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.501564] env[62204]: DEBUG nova.compute.manager [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] No waiting events found dispatching network-vif-plugged-454e3072-3434-44df-b410-3e41abc2baca {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 886.501958] env[62204]: WARNING nova.compute.manager [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Received unexpected event network-vif-plugged-454e3072-3434-44df-b410-3e41abc2baca for instance with vm_state building and task_state spawning. 
[ 886.502273] env[62204]: DEBUG nova.compute.manager [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Received event network-changed-454e3072-3434-44df-b410-3e41abc2baca {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 886.502582] env[62204]: DEBUG nova.compute.manager [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Refreshing instance network info cache due to event network-changed-454e3072-3434-44df-b410-3e41abc2baca. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 886.502886] env[62204]: DEBUG oslo_concurrency.lockutils [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] Acquiring lock "refresh_cache-f445a8ea-ff21-44e9-8389-231a03c51650" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.526828] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eda2b8-1311-2336-c1be-68d83495a155, 'name': SearchDatastore_Task, 'duration_secs': 0.011712} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.527196] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.527533] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] bd0f87d1-e53a-4433-afc6-6aea7e68d6f3/bd0f87d1-e53a-4433-afc6-6aea7e68d6f3.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.527900] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2df16ca-0524-40b7-9f14-32b8be0ac428 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.536377] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 886.536377] env[62204]: value = "task-1199981" [ 886.536377] env[62204]: _type = "Task" [ 886.536377] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.545993] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199981, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.648218] env[62204]: DEBUG oslo_concurrency.lockutils [None req-25a90dad-f211-4313-a43a-0b6a5f3e4506 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.503s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.805048] env[62204]: DEBUG oslo_concurrency.lockutils [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.088s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.809296] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.366s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 886.809754] env[62204]: INFO nova.compute.claims [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 886.833382] env[62204]: INFO nova.scheduler.client.report [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Deleted allocations for instance a2a37a1b-3ef0-4be7-924c-66c7a1583b68 [ 886.846373] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Releasing lock "refresh_cache-f445a8ea-ff21-44e9-8389-231a03c51650" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.846732] env[62204]: DEBUG nova.compute.manager [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Instance network_info: |[{"id": "454e3072-3434-44df-b410-3e41abc2baca", "address": "fa:16:3e:b7:a5:94", "network": {"id": "97127721-f740-4f77-ba5e-e3cf66bd2296", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-758803428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3664afab5c4ac28a6dbf1ba0fec2fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": 
"nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap454e3072-34", "ovs_interfaceid": "454e3072-3434-44df-b410-3e41abc2baca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 886.847113] env[62204]: DEBUG oslo_concurrency.lockutils [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] Acquired lock "refresh_cache-f445a8ea-ff21-44e9-8389-231a03c51650" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.847340] env[62204]: DEBUG nova.network.neutron [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Refreshing network info cache for port 454e3072-3434-44df-b410-3e41abc2baca {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 886.848715] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:a5:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9aa05ef8-c7bb-4af5-983f-bfa0f3f88223', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '454e3072-3434-44df-b410-3e41abc2baca', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 886.856453] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Creating folder: Project (ce3664afab5c4ac28a6dbf1ba0fec2fe). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 886.857446] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c5e95f4-2031-4379-a80a-0ff32d65b8b5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.874586] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Created folder: Project (ce3664afab5c4ac28a6dbf1ba0fec2fe) in parent group-v259933. [ 886.874586] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Creating folder: Instances. Parent ref: group-v260085. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 886.874815] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ac71355-f86e-4519-a57a-8f7ca1d00676 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.888999] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Created folder: Instances in parent group-v260085. [ 886.889318] env[62204]: DEBUG oslo.service.loopingcall [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 886.889550] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 886.889761] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5e9e06b-acb4-4767-8776-42078c7cd6fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.912309] env[62204]: INFO nova.compute.manager [-] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Took 1.45 seconds to deallocate network for instance. [ 886.912492] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 886.912492] env[62204]: value = "task-1199984" [ 886.912492] env[62204]: _type = "Task" [ 886.912492] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.929570] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199984, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.016377] env[62204]: DEBUG nova.objects.instance [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lazy-loading 'flavor' on Instance uuid 62605b48-e640-4b4d-ab77-1ed44a75daa3 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.050052] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199981, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475262} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.050052] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] bd0f87d1-e53a-4433-afc6-6aea7e68d6f3/bd0f87d1-e53a-4433-afc6-6aea7e68d6f3.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 887.050052] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 887.050052] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c90e4e2a-a2e7-40da-b6dc-998b7426c888 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.058859] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 887.058859] env[62204]: value = "task-1199985" [ 887.058859] env[62204]: _type = "Task" [ 887.058859] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.068654] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199985, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.170414] env[62204]: DEBUG oslo_vmware.rw_handles [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52091a22-c7e0-f428-8ccb-aaac64a3f721/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 887.171503] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dbbc745-4435-4821-8f09-78f49c5ef7c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.179028] env[62204]: DEBUG oslo_vmware.rw_handles [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52091a22-c7e0-f428-8ccb-aaac64a3f721/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 887.179028] env[62204]: ERROR oslo_vmware.rw_handles [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52091a22-c7e0-f428-8ccb-aaac64a3f721/disk-0.vmdk due to incomplete transfer. 
[ 887.179028] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-17ffbebe-9315-4aba-8844-063f24346d03 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.190938] env[62204]: DEBUG oslo_vmware.rw_handles [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52091a22-c7e0-f428-8ccb-aaac64a3f721/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 887.191161] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Uploaded image 7c0de877-a093-4216-b197-5c01c492bbe6 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 887.193533] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 887.196246] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8fa41b30-fa2f-4984-8288-c56f0389c051 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.204187] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.204547] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 887.204841] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.214251] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 887.214251] env[62204]: value = "task-1199986" [ 887.214251] env[62204]: _type = "Task" [ 887.214251] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.224868] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199986, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.344879] env[62204]: DEBUG oslo_concurrency.lockutils [None req-72313225-4cf4-4bb4-9bd0-0b680c8a878b tempest-ServersTestMultiNic-1202567639 tempest-ServersTestMultiNic-1202567639-project-member] Lock "a2a37a1b-3ef0-4be7-924c-66c7a1583b68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.995s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.429584] env[62204]: DEBUG oslo_concurrency.lockutils [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 887.430540] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199984, 'name': CreateVM_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.521803] env[62204]: DEBUG oslo_concurrency.lockutils [None req-072d2df7-b748-42c5-810e-6df01728d4d2 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.864s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 887.571474] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199985, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068962} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.571783] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.572639] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11133921-24c0-41b8-9d4a-217f8e623a4d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.600024] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] bd0f87d1-e53a-4433-afc6-6aea7e68d6f3/bd0f87d1-e53a-4433-afc6-6aea7e68d6f3.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.600024] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db35c8d5-87a5-49d6-af57-954dd1ecc10a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.624445] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 887.624445] env[62204]: value = "task-1199987" [ 887.624445] env[62204]: _type = "Task" [ 887.624445] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.635245] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199987, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.653490] env[62204]: DEBUG nova.network.neutron [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Updated VIF entry in instance network info cache for port 454e3072-3434-44df-b410-3e41abc2baca. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 887.653949] env[62204]: DEBUG nova.network.neutron [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Updating instance_info_cache with network_info: [{"id": "454e3072-3434-44df-b410-3e41abc2baca", "address": "fa:16:3e:b7:a5:94", "network": {"id": "97127721-f740-4f77-ba5e-e3cf66bd2296", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-758803428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3664afab5c4ac28a6dbf1ba0fec2fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap454e3072-34", "ovs_interfaceid": "454e3072-3434-44df-b410-3e41abc2baca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.726477] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199986, 'name': Destroy_Task, 'duration_secs': 0.463143} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.726857] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Destroyed the VM [ 887.727297] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 887.727573] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-025a1eff-3d91-446d-b404-cb8a2c6f8a64 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.735259] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 887.735259] env[62204]: value = "task-1199988" [ 887.735259] env[62204]: _type = "Task" [ 887.735259] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.745423] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199988, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.933180] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1199984, 'name': CreateVM_Task, 'duration_secs': 0.889905} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.933180] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 887.933180] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 887.933180] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.933180] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 887.933180] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-341d4152-81d2-47ff-9c7a-e5c75073c208 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.943986] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 887.943986] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5254917e-c05c-179f-3971-24c72cbcf035" [ 887.943986] env[62204]: _type = "Task" [ 887.943986] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.954310] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5254917e-c05c-179f-3971-24c72cbcf035, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.134689] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199987, 'name': ReconfigVM_Task, 'duration_secs': 0.393476} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.137575] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Reconfigured VM instance instance-00000052 to attach disk [datastore2] bd0f87d1-e53a-4433-afc6-6aea7e68d6f3/bd0f87d1-e53a-4433-afc6-6aea7e68d6f3.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.138499] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-009132c9-b42c-46db-bf5c-974d7ae6c94a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.149651] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 888.149651] env[62204]: value = "task-1199989" [ 888.149651] env[62204]: _type = "Task" [ 888.149651] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.156340] env[62204]: DEBUG oslo_concurrency.lockutils [req-fc44ddc8-d9e4-462c-898e-4dc76cfea0be req-6145aabc-b39c-4975-9a94-386052f51bde service nova] Releasing lock "refresh_cache-f445a8ea-ff21-44e9-8389-231a03c51650" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.163093] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199989, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.230359] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681c6b8e-6646-4895-a8da-981f1102f26f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.243925] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87963a73-2814-4a25-b0eb-e34732e48dee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.252587] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199988, 'name': RemoveSnapshot_Task} progress is 29%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.284293] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.284562] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.284785] env[62204]: DEBUG nova.network.neutron [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 888.286674] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5594fcc6-0b03-43a6-87c3-13af6375fbb7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.296528] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ff46f4-f5be-45f3-bbd7-0869244cb282 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.312028] env[62204]: DEBUG nova.compute.provider_tree [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.456456] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5254917e-c05c-179f-3971-24c72cbcf035, 'name': SearchDatastore_Task, 'duration_secs': 0.011063} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.457527] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 888.457835] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 888.458150] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 888.458369] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.458620] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 888.458925] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfef8fe9-7b89-46fa-a0af-4e51b6fe2827 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.468376] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.468660] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.468907] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 
tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "62605b48-e640-4b4d-ab77-1ed44a75daa3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.469161] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.472219] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.474403] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 888.474595] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 888.475612] env[62204]: INFO nova.compute.manager [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Terminating instance [ 888.477531] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fd2ca9e-db38-4b06-99a1-a519f55c994f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.481567] env[62204]: DEBUG nova.compute.manager [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 888.481898] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.482442] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5d34b2d-2218-4182-9a62-f59627543c41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.487928] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 888.487928] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526b3cc2-3702-4958-0fcb-4ddf98b1cf1d" [ 888.487928] env[62204]: _type = "Task" [ 888.487928] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.492740] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 888.492740] env[62204]: value = "task-1199990" [ 888.492740] env[62204]: _type = "Task" [ 888.492740] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.499171] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526b3cc2-3702-4958-0fcb-4ddf98b1cf1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.504561] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.659804] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199989, 'name': Rename_Task, 'duration_secs': 0.166665} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.660244] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.660339] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e7f0b2e-2f83-4b67-b76c-6fb17c703f5c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.667845] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 888.667845] env[62204]: value = "task-1199991" [ 888.667845] env[62204]: _type = "Task" [ 888.667845] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.675764] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199991, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.749427] env[62204]: DEBUG oslo_vmware.api [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1199988, 'name': RemoveSnapshot_Task, 'duration_secs': 0.88832} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.750444] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 888.750444] env[62204]: INFO nova.compute.manager [None req-04151877-ab6a-4101-a66b-0a46346f9e75 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Took 14.00 seconds to snapshot the instance on the hypervisor. 
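Aside on the task-polling lines above (Destroy_Task, RemoveSnapshot_Task, CreateVM_Task, ReconfigVM_Task, SearchDatastore_Task): they are emitted by oslo.vmware, whose wait_for_task() repeatedly calls _poll_task until the vSphere task reports success and then logs the result with its 'duration_secs'. The snippet below is only a minimal sketch of that library pattern, not the Nova driver code; the vCenter host, credentials, managed-object reference and task name are placeholders.

from oslo_vmware import api
from oslo_vmware import vim_util

# Placeholder vCenter endpoint and credentials -- constructing the session
# logs in to vCenter, so this is illustrative only. task_poll_interval is
# the cadence at which _poll_task emits the "progress is N%" DEBUG lines.
session = api.VMwareAPISession('vc.example.test', 'user', 'password',
                               api_retry_count=10, task_poll_interval=0.5)

# Kick off an asynchronous vSphere task and block until it finishes.
# invoke_api() issues the SOAP call; wait_for_task() polls the task object
# and returns its TaskInfo once the state is 'success', which is when the
# "completed successfully" line (with 'duration_secs') gets logged.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # placeholder moref
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)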
[ 888.815018] env[62204]: DEBUG nova.scheduler.client.report [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 888.845576] env[62204]: DEBUG nova.compute.manager [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Stashing vm_state: active {{(pid=62204) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 889.014631] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526b3cc2-3702-4958-0fcb-4ddf98b1cf1d, 'name': SearchDatastore_Task, 'duration_secs': 0.009765} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.018344] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199990, 'name': PowerOffVM_Task, 'duration_secs': 0.49505} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.018628] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9316a06b-ad75-46b7-ad4e-68d10e823e71 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.021150] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 889.021415] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Volume detach. 
Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 889.021630] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260083', 'volume_id': 'f975a826-6b8c-4488-b353-b62effa059b3', 'name': 'volume-f975a826-6b8c-4488-b353-b62effa059b3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '62605b48-e640-4b4d-ab77-1ed44a75daa3', 'attached_at': '', 'detached_at': '', 'volume_id': 'f975a826-6b8c-4488-b353-b62effa059b3', 'serial': 'f975a826-6b8c-4488-b353-b62effa059b3'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 889.022440] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84311ac2-e965-4ab5-bc58-2fcdffec0a1e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.048822] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6796a0-dcda-4a75-bb29-b5f80e438112 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.051885] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 889.051885] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529cae05-dce1-5566-8b37-aca488ff0757" [ 889.051885] env[62204]: _type = "Task" [ 889.051885] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.060613] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c951063-f3d6-4c40-8eab-bac5c1b3fb5c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.066221] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529cae05-dce1-5566-8b37-aca488ff0757, 'name': SearchDatastore_Task, 'duration_secs': 0.023573} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.066829] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.067114] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] f445a8ea-ff21-44e9-8389-231a03c51650/f445a8ea-ff21-44e9-8389-231a03c51650.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 889.067372] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7412227f-af79-4db5-89ba-06fa274fe99c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.086413] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07771b79-dc6a-4de8-bfad-91ebf895b4b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.091976] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 889.091976] env[62204]: value = "task-1199992" [ 889.091976] env[62204]: _type = "Task" [ 889.091976] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.102729] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] The volume has not been displaced from its original location: [datastore2] volume-f975a826-6b8c-4488-b353-b62effa059b3/volume-f975a826-6b8c-4488-b353-b62effa059b3.vmdk. No consolidation needed. {{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 889.108193] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Reconfiguring VM instance instance-0000004f to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 889.114389] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bb9889f-4d1b-405d-8f5c-0481ab0d1b9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.134917] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199992, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.136410] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 889.136410] env[62204]: value = "task-1199993" [ 889.136410] env[62204]: _type = "Task" [ 889.136410] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.148015] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199993, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.181064] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199991, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.262750] env[62204]: DEBUG nova.network.neutron [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [{"id": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "address": "fa:16:3e:b6:54:e6", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf86a68-c5", "ovs_interfaceid": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.326705] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.519s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 889.327328] env[62204]: DEBUG nova.compute.manager [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 
259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 889.330846] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.859s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 889.330957] env[62204]: DEBUG nova.objects.instance [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lazy-loading 'resources' on Instance uuid 1121b1b8-127e-475f-8dfc-de43911de39a {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.372181] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 889.618605] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199992, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.651687] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199993, 'name': ReconfigVM_Task, 'duration_secs': 0.396903} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.652168] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Reconfigured VM instance instance-0000004f to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 889.657935] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cbf62fc6-8236-44ec-be1b-9a8134cd41ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.681401] env[62204]: DEBUG oslo_vmware.api [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1199991, 'name': PowerOnVM_Task, 'duration_secs': 0.689829} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.681816] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.681975] env[62204]: INFO nova.compute.manager [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Took 7.58 seconds to spawn the instance on the hypervisor. [ 889.682295] env[62204]: DEBUG nova.compute.manager [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 889.684469] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848bd7ee-a0cc-4940-9df3-f89507de0b8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.687870] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 889.687870] env[62204]: value = "task-1199994" [ 889.687870] env[62204]: _type = "Task" [ 889.687870] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.701324] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199994, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.767039] env[62204]: DEBUG oslo_concurrency.lockutils [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 889.837854] env[62204]: DEBUG nova.compute.utils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 889.842813] env[62204]: DEBUG nova.compute.manager [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 889.842813] env[62204]: DEBUG nova.network.neutron [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 889.906822] env[62204]: DEBUG nova.policy [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c29444fe298d48578ce250063841a3c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec87b51786754b05aa75abb818bdbc15', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 890.119950] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199992, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.675937} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.120359] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] f445a8ea-ff21-44e9-8389-231a03c51650/f445a8ea-ff21-44e9-8389-231a03c51650.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.120607] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.120874] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f805057-7880-42f1-98d2-ac8e79225c50 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.128823] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 890.128823] env[62204]: value = "task-1199995" [ 890.128823] env[62204]: _type = "Task" [ 890.128823] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.142351] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199995, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.201335] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199994, 'name': ReconfigVM_Task, 'duration_secs': 0.266416} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.206677] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260083', 'volume_id': 'f975a826-6b8c-4488-b353-b62effa059b3', 'name': 'volume-f975a826-6b8c-4488-b353-b62effa059b3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '62605b48-e640-4b4d-ab77-1ed44a75daa3', 'attached_at': '', 'detached_at': '', 'volume_id': 'f975a826-6b8c-4488-b353-b62effa059b3', 'serial': 'f975a826-6b8c-4488-b353-b62effa059b3'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 890.207125] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 890.211798] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faabd212-90fa-4c43-b902-0ab6b7a0e144 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.215843] env[62204]: INFO nova.compute.manager [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Took 42.48 seconds to build instance. 
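Aside on the lock lines above: the 'Acquiring lock ... by ...', 'acquired ... waited N.NNNs' and '"released" ... held N.NNNs' DEBUG messages come from oslo.concurrency's lockutils, which Nova uses to serialize work per instance UUID and per image-cache entry. The sketch below shows both the decorator and context-manager forms of that pattern; the function name and lock strings are borrowed from the surrounding entries purely for illustration, and this is not the actual Nova code.

from oslo_concurrency import lockutils

# Decorator form: the generated wrapper (the "inner" frames in the log)
# records how long the caller waited for the lock and how long it was
# held, so concurrent operations on one instance UUID run one at a time.
@lockutils.synchronized('62605b48-e640-4b4d-ab77-1ed44a75daa3')
def do_terminate_instance():
    pass  # critical section: executes while the named lock is held

# Context-manager form, matching the "Acquiring/Acquired/Releasing lock"
# lines around the image-cache entry on the datastore:
with lockutils.lock('[datastore2] devstack-image-cache_base/'
                    'c0e4d3a1-f965-49e2-ab05-fbf425872dcc'):
    pass  # critical section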
[ 890.221440] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 890.221700] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-306eca06-ab30-48a5-b7af-d11e7771ee89 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.290668] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4235dc40-ee73-4b82-829e-45d6c3f8c662 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.295744] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 890.295992] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 890.296637] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleting the datastore file [datastore1] 62605b48-e640-4b4d-ab77-1ed44a75daa3 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 890.299207] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55d8b6ff-70f6-481c-82b1-73c539c1bd59 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.300846] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5d8867-7997-4614-9ef2-fca5c3d47e14 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.307225] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b53e07-04c9-42f3-a0ee-f808c52d224d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.327233] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 890.327233] env[62204]: value = "task-1199997" [ 890.327233] env[62204]: _type = "Task" [ 890.327233] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.328520] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91c0ad5-45e6-436d-9ed9-6af991e10ec0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.362969] env[62204]: DEBUG nova.compute.manager [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 890.366996] env[62204]: DEBUG nova.network.neutron [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Successfully created port: 07c8c254-5b9d-40bf-820d-9474821ecfd6 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 890.371097] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e75d5a-48e3-490f-8030-c3b913e45ae1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.384515] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance '0a4a432d-a71a-4da7-be90-25dcec5a64c6' progress to 83 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 890.392390] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199997, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.398105] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5c512a-7885-40c6-ac30-b07993670957 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.413998] env[62204]: DEBUG nova.compute.provider_tree [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.641816] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199995, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070703} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.642352] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 890.643197] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3377e7-ab51-4a2c-a366-5afda90340c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.675291] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] f445a8ea-ff21-44e9-8389-231a03c51650/f445a8ea-ff21-44e9-8389-231a03c51650.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 890.675590] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-438b6489-4f85-4140-b8ef-1e46d4b55430 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.698513] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 890.698513] env[62204]: value = "task-1199998" [ 890.698513] env[62204]: _type = "Task" [ 890.698513] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.713698] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199998, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.717396] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2d26a0ea-2346-412f-9948-d28583cb4e1e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.045s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.845416] env[62204]: DEBUG oslo_vmware.api [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1199997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159313} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.845884] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 890.845973] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 890.846178] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 890.846373] env[62204]: INFO nova.compute.manager [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Took 2.36 seconds to destroy the instance on the hypervisor. [ 890.846626] env[62204]: DEBUG oslo.service.loopingcall [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.846835] env[62204]: DEBUG nova.compute.manager [-] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 890.846922] env[62204]: DEBUG nova.network.neutron [-] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 890.895815] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-98b7d206-5fb6-4076-b50a-19b052dfc76c tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance '0a4a432d-a71a-4da7-be90-25dcec5a64c6' progress to 100 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 890.917010] env[62204]: DEBUG nova.scheduler.client.report [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 891.214332] env[62204]: DEBUG oslo_vmware.api [None 
req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199998, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.391941] env[62204]: DEBUG nova.compute.manager [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 891.402131] env[62204]: DEBUG nova.compute.manager [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 891.406229] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b58c61-a328-4a2f-87d4-a0207a7df796 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.421449] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 891.423158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 891.423158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 891.423158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 891.423158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 891.423158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 891.427158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 891.427158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 891.427158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 891.427158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 891.427158] env[62204]: DEBUG nova.virt.hardware [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 891.427158] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.095s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.428465] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c26209-8fcf-415a-873f-3510179f9c72 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.432624] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.849s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.436163] env[62204]: INFO nova.compute.claims [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 
d97d792d-614f-42e3-8516-6c0a7cf15ad5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 891.444028] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7439a86-494e-4785-8d7e-8b2091f64158 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.469032] env[62204]: INFO nova.scheduler.client.report [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Deleted allocations for instance 1121b1b8-127e-475f-8dfc-de43911de39a [ 891.532168] env[62204]: DEBUG nova.compute.manager [req-61821d6b-72fc-4dbf-8169-1c23baacc142 req-a56808c6-fec8-473b-b765-b5f382f3171a service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Received event network-vif-deleted-5d075538-3e94-4457-b88a-3dcde88f99db {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 891.532448] env[62204]: INFO nova.compute.manager [req-61821d6b-72fc-4dbf-8169-1c23baacc142 req-a56808c6-fec8-473b-b765-b5f382f3171a service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Neutron deleted interface 5d075538-3e94-4457-b88a-3dcde88f99db; detaching it from the instance and deleting it from the info cache [ 891.532663] env[62204]: DEBUG nova.network.neutron [req-61821d6b-72fc-4dbf-8169-1c23baacc142 req-a56808c6-fec8-473b-b765-b5f382f3171a service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.710410] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199998, 'name': ReconfigVM_Task, 'duration_secs': 0.976256} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.711096] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Reconfigured VM instance instance-00000053 to attach disk [datastore2] f445a8ea-ff21-44e9-8389-231a03c51650/f445a8ea-ff21-44e9-8389-231a03c51650.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 891.711912] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-253d360e-7fd1-4c8b-a20d-9af04678a085 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.722018] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 891.722018] env[62204]: value = "task-1199999" [ 891.722018] env[62204]: _type = "Task" [ 891.722018] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.730587] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199999, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.856931] env[62204]: DEBUG nova.network.neutron [-] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.934671] env[62204]: INFO nova.compute.manager [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] instance snapshotting [ 891.939243] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bce903-b079-41f0-b9a0-5157fbd0beae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.969135] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecaee28f-154b-42bd-91e3-31ca8d76765f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.977680] env[62204]: DEBUG nova.compute.manager [req-03a4463b-64b5-451f-9981-b352b5fbdfcb req-ba3874c6-8a0b-4c34-9537-a4b6929c5745 service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Received event network-vif-plugged-07c8c254-5b9d-40bf-820d-9474821ecfd6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 891.977908] env[62204]: DEBUG oslo_concurrency.lockutils [req-03a4463b-64b5-451f-9981-b352b5fbdfcb req-ba3874c6-8a0b-4c34-9537-a4b6929c5745 service nova] Acquiring lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.978147] env[62204]: DEBUG oslo_concurrency.lockutils [req-03a4463b-64b5-451f-9981-b352b5fbdfcb req-ba3874c6-8a0b-4c34-9537-a4b6929c5745 service nova] Lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 891.978327] env[62204]: DEBUG oslo_concurrency.lockutils [req-03a4463b-64b5-451f-9981-b352b5fbdfcb req-ba3874c6-8a0b-4c34-9537-a4b6929c5745 service nova] Lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.978687] env[62204]: DEBUG nova.compute.manager [req-03a4463b-64b5-451f-9981-b352b5fbdfcb req-ba3874c6-8a0b-4c34-9537-a4b6929c5745 service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] No waiting events found dispatching network-vif-plugged-07c8c254-5b9d-40bf-820d-9474821ecfd6 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 891.978805] env[62204]: WARNING nova.compute.manager 
[req-03a4463b-64b5-451f-9981-b352b5fbdfcb req-ba3874c6-8a0b-4c34-9537-a4b6929c5745 service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Received unexpected event network-vif-plugged-07c8c254-5b9d-40bf-820d-9474821ecfd6 for instance with vm_state building and task_state spawning. [ 891.985179] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c62ba9d4-c26d-4a12-8a34-6a6f2c722d4f tempest-ListServersNegativeTestJSON-1469495009 tempest-ListServersNegativeTestJSON-1469495009-project-member] Lock "1121b1b8-127e-475f-8dfc-de43911de39a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.320s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.036437] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33b7447d-beb4-4bf2-bed5-0bbbe33dd3b7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.066409] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0f8e60-4134-451a-860b-8fa94f7ba733 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.139325] env[62204]: DEBUG nova.compute.manager [req-61821d6b-72fc-4dbf-8169-1c23baacc142 req-a56808c6-fec8-473b-b765-b5f382f3171a service nova] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Detach interface failed, port_id=5d075538-3e94-4457-b88a-3dcde88f99db, reason: Instance 62605b48-e640-4b4d-ab77-1ed44a75daa3 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 892.141923] env[62204]: DEBUG nova.compute.manager [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 892.142815] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf20725f-60be-49da-a1a7-1e031a350f66 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.170948] env[62204]: DEBUG nova.network.neutron [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Successfully updated port: 07c8c254-5b9d-40bf-820d-9474821ecfd6 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 892.229905] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1199999, 'name': Rename_Task, 'duration_secs': 0.214247} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.230203] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.230492] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5a4e9c6-4a39-42df-9647-8b4f32c29103 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.239311] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 892.239311] env[62204]: value = "task-1200000" [ 892.239311] env[62204]: _type = "Task" [ 892.239311] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.248256] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1200000, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.360413] env[62204]: INFO nova.compute.manager [-] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Took 1.51 seconds to deallocate network for instance. [ 892.503803] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 892.504349] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1cbb1c87-e50e-47d9-8697-d9a36e49273a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.514775] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 892.514775] env[62204]: value = "task-1200001" [ 892.514775] env[62204]: _type = "Task" [ 892.514775] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.528306] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200001, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.660290] env[62204]: INFO nova.compute.manager [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] instance snapshotting [ 892.664602] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98ac6fc-bbc5-42dc-aaff-1ebad46de0c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.672569] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.672762] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.672961] env[62204]: DEBUG nova.network.neutron [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 892.693250] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc58893-c9fc-40c6-9914-92f30667d975 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.750963] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1200000, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.847158] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bfa0b2-e470-4dfd-80ea-305ab5fa920f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.857902] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7fa0db-8d4f-489a-8a0e-691b0dfafd23 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.891106] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3e017c-af5c-40e7-bc08-f9a313830f41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.904524] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1265d6-3044-4b67-b848-220fcaf5e216 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.911183] env[62204]: INFO nova.compute.manager [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Took 0.55 seconds to detach 1 volumes for instance. [ 892.927770] env[62204]: DEBUG nova.compute.provider_tree [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.028480] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200001, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.206945] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 893.207298] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-57c38dc8-3e43-4bb3-99bd-a87c555ad8cd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.217052] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 893.217052] env[62204]: value = "task-1200002" [ 893.217052] env[62204]: _type = "Task" [ 893.217052] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.227561] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200002, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.253410] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1200000, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.271100] env[62204]: DEBUG nova.network.neutron [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 893.432590] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.433543] env[62204]: DEBUG nova.scheduler.client.report [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 893.512382] env[62204]: DEBUG nova.network.neutron [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Updating instance_info_cache with network_info: [{"id": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "address": "fa:16:3e:6d:c4:e1", "network": {"id": "9b2eaa21-fa75-417b-8af1-754e25729d68", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1173392259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ec87b51786754b05aa75abb818bdbc15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap07c8c254-5b", "ovs_interfaceid": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.529617] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200001, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.621652] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.621951] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.622169] env[62204]: DEBUG nova.compute.manager [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Going to confirm migration 1 {{(pid=62204) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 893.728688] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200002, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.749916] env[62204]: DEBUG oslo_vmware.api [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1200000, 'name': PowerOnVM_Task, 'duration_secs': 1.260019} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.750304] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 893.750626] env[62204]: INFO nova.compute.manager [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Took 9.07 seconds to spawn the instance on the hypervisor. 
[ 893.750849] env[62204]: DEBUG nova.compute.manager [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 893.751668] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf06089-168e-4c84-b092-12e43507b0cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.939636] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.940232] env[62204]: DEBUG nova.compute.manager [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 893.943018] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.400s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.943368] env[62204]: DEBUG nova.objects.instance [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lazy-loading 'resources' on Instance uuid 031cb3ff-4a80-4961-a399-de31fc72e65b {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.996956] env[62204]: DEBUG nova.compute.manager [req-05add762-c624-432d-a735-4b210914b543 req-f184f2e7-7f7a-49fa-ab91-2b2532e75ced service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Received event network-changed-07c8c254-5b9d-40bf-820d-9474821ecfd6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 893.997545] env[62204]: DEBUG nova.compute.manager [req-05add762-c624-432d-a735-4b210914b543 req-f184f2e7-7f7a-49fa-ab91-2b2532e75ced service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Refreshing instance network info cache due to event network-changed-07c8c254-5b9d-40bf-820d-9474821ecfd6. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 893.997545] env[62204]: DEBUG oslo_concurrency.lockutils [req-05add762-c624-432d-a735-4b210914b543 req-f184f2e7-7f7a-49fa-ab91-2b2532e75ced service nova] Acquiring lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.015841] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.016163] env[62204]: DEBUG nova.compute.manager [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Instance network_info: |[{"id": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "address": "fa:16:3e:6d:c4:e1", "network": {"id": "9b2eaa21-fa75-417b-8af1-754e25729d68", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1173392259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ec87b51786754b05aa75abb818bdbc15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07c8c254-5b", "ovs_interfaceid": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 894.016481] env[62204]: DEBUG oslo_concurrency.lockutils [req-05add762-c624-432d-a735-4b210914b543 req-f184f2e7-7f7a-49fa-ab91-2b2532e75ced service nova] Acquired lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.016661] env[62204]: DEBUG nova.network.neutron [req-05add762-c624-432d-a735-4b210914b543 req-f184f2e7-7f7a-49fa-ab91-2b2532e75ced service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Refreshing network info cache for port 07c8c254-5b9d-40bf-820d-9474821ecfd6 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 894.018296] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:c4:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd7b5f1ef-d4b9-4ec3-b047-17e4cb349d25', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07c8c254-5b9d-40bf-820d-9474821ecfd6', 'vif_model': 'vmxnet3'}] 
{{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 894.026578] env[62204]: DEBUG oslo.service.loopingcall [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.033022] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 894.033768] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb990c64-42ec-40ac-82de-ae1a70ab608f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.056784] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200001, 'name': CreateSnapshot_Task, 'duration_secs': 1.016851} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.058458] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 894.058725] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 894.058725] env[62204]: value = "task-1200003" [ 894.058725] env[62204]: _type = "Task" [ 894.058725] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.059448] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6397a27-ff5c-43d3-9110-17921c4dd1a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.082999] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200003, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.222112] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 894.222112] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.222112] env[62204]: DEBUG nova.network.neutron [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 894.222112] env[62204]: DEBUG nova.objects.instance [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'info_cache' on Instance uuid 0a4a432d-a71a-4da7-be90-25dcec5a64c6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.232329] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200002, 'name': CreateSnapshot_Task, 'duration_secs': 0.93165} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.232539] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 894.233606] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc99dae-7464-4530-890d-36b2619b957e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.276855] env[62204]: INFO nova.compute.manager [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Took 45.08 seconds to build instance. [ 894.299031] env[62204]: DEBUG nova.network.neutron [req-05add762-c624-432d-a735-4b210914b543 req-f184f2e7-7f7a-49fa-ab91-2b2532e75ced service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Updated VIF entry in instance network info cache for port 07c8c254-5b9d-40bf-820d-9474821ecfd6. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 894.299031] env[62204]: DEBUG nova.network.neutron [req-05add762-c624-432d-a735-4b210914b543 req-f184f2e7-7f7a-49fa-ab91-2b2532e75ced service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Updating instance_info_cache with network_info: [{"id": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "address": "fa:16:3e:6d:c4:e1", "network": {"id": "9b2eaa21-fa75-417b-8af1-754e25729d68", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1173392259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ec87b51786754b05aa75abb818bdbc15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07c8c254-5b", "ovs_interfaceid": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.450128] env[62204]: DEBUG nova.compute.utils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 894.453475] env[62204]: DEBUG nova.compute.manager [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 894.453475] env[62204]: DEBUG nova.network.neutron [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 894.531790] env[62204]: DEBUG nova.policy [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd17709d694e840d796ba4fca7d6d08f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43b28641aa01450b8ad70dc121642f79', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 894.572134] env[62204]: DEBUG oslo_concurrency.lockutils [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.572609] env[62204]: DEBUG oslo_concurrency.lockutils [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.573112] env[62204]: INFO nova.compute.manager [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Rebooting instance [ 894.579069] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200003, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.591875] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 894.592934] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-143335cd-324a-4c5c-b33f-4407bbd8873f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.605725] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 894.605725] env[62204]: value = "task-1200004" [ 894.605725] env[62204]: _type = "Task" [ 894.605725] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.615739] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200004, 'name': CloneVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.760023] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 894.760728] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c3f39f18-c4a2-401d-9b08-b37822f3ca96 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.770557] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 894.770557] env[62204]: value = "task-1200005" [ 894.770557] env[62204]: _type = "Task" [ 894.770557] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.778713] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fcc18a33-2f6b-4e4e-9259-5ec3e3c3a363 tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "f445a8ea-ff21-44e9-8389-231a03c51650" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.240s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.785260] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200005, 'name': CloneVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.800755] env[62204]: DEBUG oslo_concurrency.lockutils [req-05add762-c624-432d-a735-4b210914b543 req-f184f2e7-7f7a-49fa-ab91-2b2532e75ced service nova] Releasing lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.955758] env[62204]: DEBUG nova.compute.manager [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 895.025568] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9062b1e7-5839-4eed-a0f1-02f6033e4af3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.036724] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb270c1-2071-44f7-977d-2cc3bca6549f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.075472] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043ade70-b050-4851-82f2-5a4ad3185bc7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.091744] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200003, 'name': CreateVM_Task, 'duration_secs': 0.743097} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.095642] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3708004-7594-4651-ab04-d1e14db35ef4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.104748] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 895.105836] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.106101] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.106757] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 895.107589] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f67bf10-6776-4c1e-926a-f85d076243d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.119680] env[62204]: DEBUG oslo_concurrency.lockutils [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.120248] env[62204]: DEBUG oslo_concurrency.lockutils [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.120248] env[62204]: DEBUG nova.network.neutron [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 895.128946] env[62204]: DEBUG nova.compute.provider_tree [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.137075] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 895.137075] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52666075-d9e2-7654-b49f-1d490d94b58d" [ 895.137075] env[62204]: _type = "Task" [ 895.137075] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.142939] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200004, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.151287] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52666075-d9e2-7654-b49f-1d490d94b58d, 'name': SearchDatastore_Task, 'duration_secs': 0.011604} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.151764] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.151884] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.152140] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.152288] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.152544] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.152772] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-e83a9ce0-a542-443f-94cb-8eb509370e7e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.162730] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.162969] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.164308] env[62204]: DEBUG nova.network.neutron [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Successfully created port: 7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 895.166209] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24973979-890c-4147-8493-51ebe8b6a6d2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.173363] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 895.173363] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521bbe82-05bd-d788-bdf0-07a9937780a4" [ 895.173363] env[62204]: _type = "Task" [ 895.173363] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.181017] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521bbe82-05bd-d788-bdf0-07a9937780a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.282080] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200005, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.619720] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200004, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.635141] env[62204]: DEBUG nova.scheduler.client.report [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 895.686512] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521bbe82-05bd-d788-bdf0-07a9937780a4, 'name': SearchDatastore_Task, 'duration_secs': 0.010874} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.688235] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d34c27dd-930e-45fa-9677-8f73451293cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.696150] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 895.696150] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5258e04b-236f-db3f-8516-589964c68f77" [ 895.696150] env[62204]: _type = "Task" [ 895.696150] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.711034] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5258e04b-236f-db3f-8516-589964c68f77, 'name': SearchDatastore_Task, 'duration_secs': 0.009448} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.711553] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.711926] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1/259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 895.712809] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81ff34f4-af40-441d-88c8-1dcb9e610e94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.719375] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 895.719375] env[62204]: value = "task-1200006" [ 895.719375] env[62204]: _type = "Task" [ 895.719375] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.727899] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200006, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.782954] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200005, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.974559] env[62204]: DEBUG nova.compute.manager [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 896.028605] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 896.029016] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 896.029282] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.029544] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 896.030262] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.030262] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 896.030262] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 896.030544] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 896.030583] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 896.030759] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 896.034966] env[62204]: DEBUG nova.virt.hardware [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 896.034966] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9b5873-8509-4752-9a1e-a8193da62b4a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.041729] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acc923f-b21f-44b7-a159-31d1672bd26d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.114485] env[62204]: DEBUG nova.network.neutron [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [{"id": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "address": "fa:16:3e:b6:54:e6", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf86a68-c5", "ovs_interfaceid": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.121474] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200004, 'name': CloneVM_Task} progress is 
100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.149632] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.152853] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 37.396s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.153678] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.153678] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62204) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 896.153907] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.317s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.155628] env[62204]: INFO nova.compute.claims [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.164447] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e795b5-a437-48cf-b72a-a1b30fd1e54e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.179665] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f19aab-631a-4875-979b-de0e395b3712 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.197967] env[62204]: INFO nova.scheduler.client.report [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleted allocations for instance 031cb3ff-4a80-4961-a399-de31fc72e65b [ 896.199018] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be359e3-b609-46f0-9657-4177b583140f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.210104] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d48b28ca-7dde-43fd-83f6-0c2e62b2da14 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.255392] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179260MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62204) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 896.255619] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.265758] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200006, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.283342] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200005, 'name': CloneVM_Task, 'duration_secs': 1.49168} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.283652] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Created linked-clone VM from snapshot [ 896.284493] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09ebcd3-5a81-4a95-9510-758198ff3c9a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.292200] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Uploading image a9e952fa-67fa-4a49-a75c-594f33aa3496 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 896.296622] env[62204]: DEBUG nova.network.neutron [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [{"id": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "address": "fa:16:3e:d5:25:7f", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e81e820-35", "ovs_interfaceid": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.321393] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 896.321393] env[62204]: value = "vm-260092" [ 896.321393] env[62204]: _type = "VirtualMachine" [ 896.321393] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 896.321737] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a7cd5aef-f9fb-4da1-9f2d-1e7674503742 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.329343] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lease: (returnval){ [ 896.329343] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cee0aa-a937-c96c-9236-8006d6be6b9f" [ 896.329343] env[62204]: _type = "HttpNfcLease" [ 896.329343] env[62204]: } obtained for exporting VM: (result){ [ 896.329343] env[62204]: value = "vm-260092" [ 896.329343] env[62204]: _type = "VirtualMachine" [ 896.329343] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 896.330305] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the lease: (returnval){ [ 896.330305] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cee0aa-a937-c96c-9236-8006d6be6b9f" [ 896.330305] env[62204]: _type = "HttpNfcLease" [ 896.330305] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 896.339081] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 896.339081] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cee0aa-a937-c96c-9236-8006d6be6b9f" [ 896.339081] env[62204]: _type = "HttpNfcLease" [ 896.339081] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 896.387942] env[62204]: DEBUG nova.compute.manager [req-5661dbbb-10b3-4217-a719-c1a46dd46a1c req-dbdd9235-0944-47bd-a79b-788b5466680d service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Received event network-changed-454e3072-3434-44df-b410-3e41abc2baca {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 896.387942] env[62204]: DEBUG nova.compute.manager [req-5661dbbb-10b3-4217-a719-c1a46dd46a1c req-dbdd9235-0944-47bd-a79b-788b5466680d service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Refreshing instance network info cache due to event network-changed-454e3072-3434-44df-b410-3e41abc2baca. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 896.388132] env[62204]: DEBUG oslo_concurrency.lockutils [req-5661dbbb-10b3-4217-a719-c1a46dd46a1c req-dbdd9235-0944-47bd-a79b-788b5466680d service nova] Acquiring lock "refresh_cache-f445a8ea-ff21-44e9-8389-231a03c51650" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 896.388212] env[62204]: DEBUG oslo_concurrency.lockutils [req-5661dbbb-10b3-4217-a719-c1a46dd46a1c req-dbdd9235-0944-47bd-a79b-788b5466680d service nova] Acquired lock "refresh_cache-f445a8ea-ff21-44e9-8389-231a03c51650" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.388376] env[62204]: DEBUG nova.network.neutron [req-5661dbbb-10b3-4217-a719-c1a46dd46a1c req-dbdd9235-0944-47bd-a79b-788b5466680d service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Refreshing network info cache for port 454e3072-3434-44df-b410-3e41abc2baca {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 896.620810] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.621150] env[62204]: DEBUG nova.objects.instance [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'migration_context' on Instance uuid 0a4a432d-a71a-4da7-be90-25dcec5a64c6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.623268] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200004, 'name': CloneVM_Task, 'duration_secs': 1.577083} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.623268] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Created linked-clone VM from snapshot [ 896.623617] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d549fd6a-ea41-47b8-a89e-744f042967aa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.632099] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Uploading image 8bb268b3-36ef-40e8-9a00-091cb3770103 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 896.663962] env[62204]: DEBUG oslo_vmware.rw_handles [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 896.663962] env[62204]: value = "vm-260091" [ 896.663962] env[62204]: _type = "VirtualMachine" [ 896.663962] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 896.667175] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-feb53b3b-7f88-42d6-a912-b5bf6b56c457 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.674643] env[62204]: DEBUG oslo_vmware.rw_handles [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lease: (returnval){ [ 896.674643] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f6b468-d093-28db-c1a2-e046e8991fd7" [ 896.674643] env[62204]: _type = "HttpNfcLease" [ 896.674643] env[62204]: } obtained for exporting VM: (result){ [ 896.674643] env[62204]: value = "vm-260091" [ 896.674643] env[62204]: _type = "VirtualMachine" [ 896.674643] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 896.675036] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the lease: (returnval){ [ 896.675036] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f6b468-d093-28db-c1a2-e046e8991fd7" [ 896.675036] env[62204]: _type = "HttpNfcLease" [ 896.675036] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 896.683313] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 896.683313] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f6b468-d093-28db-c1a2-e046e8991fd7" [ 896.683313] env[62204]: _type = "HttpNfcLease" [ 896.683313] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 896.711665] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e72c7573-0530-4df0-a34c-f226bfdc86f4 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "031cb3ff-4a80-4961-a399-de31fc72e65b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.283s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.739468] env[62204]: DEBUG nova.network.neutron [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Successfully updated port: 7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 896.765454] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200006, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.635755} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.765859] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1/259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.766165] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.766658] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3e294ca-788a-43ce-b942-b845f7701b4c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.773956] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 896.773956] env[62204]: value = "task-1200009" [ 896.773956] env[62204]: _type = "Task" [ 896.773956] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.783513] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200009, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.799806] env[62204]: DEBUG oslo_concurrency.lockutils [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.801413] env[62204]: DEBUG nova.compute.manager [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 896.806022] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a4abe0-0153-45e4-84a7-55ce0ae10a4b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.837464] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 896.837464] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cee0aa-a937-c96c-9236-8006d6be6b9f" [ 896.837464] env[62204]: _type = "HttpNfcLease" [ 896.837464] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 896.838506] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 896.838506] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cee0aa-a937-c96c-9236-8006d6be6b9f" [ 896.838506] env[62204]: _type = "HttpNfcLease" [ 896.838506] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 896.838694] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27393c11-99a9-49fd-87a1-62d4c91ab7a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.845982] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5bc50-6b92-034e-9890-8f84bedbac69/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 896.846226] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5bc50-6b92-034e-9890-8f84bedbac69/disk-0.vmdk for reading. 
{{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 896.997555] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fe10d821-a918-4550-bdac-4a442fedc10e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.127527] env[62204]: DEBUG nova.objects.base [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Object Instance<0a4a432d-a71a-4da7-be90-25dcec5a64c6> lazy-loaded attributes: info_cache,migration_context {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 897.129231] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0c0602-7d4c-4fb2-bed5-3545a9d3e5f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.158503] env[62204]: DEBUG nova.network.neutron [req-5661dbbb-10b3-4217-a719-c1a46dd46a1c req-dbdd9235-0944-47bd-a79b-788b5466680d service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Updated VIF entry in instance network info cache for port 454e3072-3434-44df-b410-3e41abc2baca. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 897.159251] env[62204]: DEBUG nova.network.neutron [req-5661dbbb-10b3-4217-a719-c1a46dd46a1c req-dbdd9235-0944-47bd-a79b-788b5466680d service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Updating instance_info_cache with network_info: [{"id": "454e3072-3434-44df-b410-3e41abc2baca", "address": "fa:16:3e:b7:a5:94", "network": {"id": "97127721-f740-4f77-ba5e-e3cf66bd2296", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-758803428-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ce3664afab5c4ac28a6dbf1ba0fec2fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9aa05ef8-c7bb-4af5-983f-bfa0f3f88223", "external-id": "nsx-vlan-transportzone-135", "segmentation_id": 135, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap454e3072-34", "ovs_interfaceid": "454e3072-3434-44df-b410-3e41abc2baca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.160715] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1b53028-6c0d-40fe-8efc-45168f1da63d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.167922] env[62204]: DEBUG oslo_vmware.api [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 897.167922] env[62204]: value 
= "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52caea1a-d84b-66cf-a6ca-6d5f3a9ec039" [ 897.167922] env[62204]: _type = "Task" [ 897.167922] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.183736] env[62204]: DEBUG oslo_vmware.api [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52caea1a-d84b-66cf-a6ca-6d5f3a9ec039, 'name': SearchDatastore_Task, 'duration_secs': 0.006745} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.184609] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.188219] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 897.188219] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f6b468-d093-28db-c1a2-e046e8991fd7" [ 897.188219] env[62204]: _type = "HttpNfcLease" [ 897.188219] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 897.188219] env[62204]: DEBUG oslo_vmware.rw_handles [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 897.188219] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f6b468-d093-28db-c1a2-e046e8991fd7" [ 897.188219] env[62204]: _type = "HttpNfcLease" [ 897.188219] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 897.191017] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c74f617-2baa-49e6-aa08-ac2b840d79bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.199194] env[62204]: DEBUG oslo_vmware.rw_handles [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5269da5f-888c-b436-d537-44b8b5fbb765/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 897.199292] env[62204]: DEBUG oslo_vmware.rw_handles [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5269da5f-888c-b436-d537-44b8b5fbb765/disk-0.vmdk for reading. 
{{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 897.263840] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.264018] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.264407] env[62204]: DEBUG nova.network.neutron [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 897.287908] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200009, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0797} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.291243] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.292839] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e2c627-bc1b-4533-9ba3-cb2019acf1a0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.317276] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1/259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.327019] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1c7b213-7916-45fb-83aa-9d3daf4cf4db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.344412] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-05bf1b13-46a8-4871-85e3-bbf1ffb51346 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.350099] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 
tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 897.350099] env[62204]: value = "task-1200010" [ 897.350099] env[62204]: _type = "Task" [ 897.350099] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.358382] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200010, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.665696] env[62204]: DEBUG oslo_concurrency.lockutils [req-5661dbbb-10b3-4217-a719-c1a46dd46a1c req-dbdd9235-0944-47bd-a79b-788b5466680d service nova] Releasing lock "refresh_cache-f445a8ea-ff21-44e9-8389-231a03c51650" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 897.676391] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0304a984-e3b3-4a88-a417-926cc7bc532b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.684055] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9dd4dd-de0e-4bd5-bff1-71926481f53f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.715393] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d949c19e-8ba6-4c27-9a13-d843b5a1b8ba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.723324] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5106b00-4ea8-4157-891f-ce1254d9febb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.737846] env[62204]: DEBUG nova.compute.provider_tree [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.800995] env[62204]: DEBUG nova.network.neutron [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 897.848375] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1805f7e6-68db-41d3-9afa-405a692f15b9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.867339] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Doing hard reboot of VM {{(pid=62204) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 897.871721] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-c24ce470-90c6-4388-8ed2-e3d9150a5196 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.873774] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200010, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.880184] env[62204]: DEBUG oslo_vmware.api [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 897.880184] env[62204]: value = "task-1200011" [ 897.880184] env[62204]: _type = "Task" [ 897.880184] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.889922] env[62204]: DEBUG oslo_vmware.api [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200011, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.988806] env[62204]: DEBUG nova.network.neutron [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updating instance_info_cache with network_info: [{"id": "7394819f-3d04-4685-a087-5a61976b658a", "address": "fa:16:3e:96:f4:0f", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7394819f-3d", "ovs_interfaceid": "7394819f-3d04-4685-a087-5a61976b658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.240822] env[62204]: DEBUG nova.scheduler.client.report [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 898.369439] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200010, 'name': ReconfigVM_Task, 'duration_secs': 0.568707} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.369812] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1/259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.370565] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84ba1cf5-539c-45da-a19a-223cbaa544d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.378380] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 898.378380] env[62204]: value = "task-1200012" [ 898.378380] env[62204]: _type = "Task" [ 898.378380] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.390312] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200012, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.393981] env[62204]: DEBUG oslo_vmware.api [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200011, 'name': ResetVM_Task, 'duration_secs': 0.123328} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.394401] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Did hard reboot of VM {{(pid=62204) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 898.394614] env[62204]: DEBUG nova.compute.manager [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 898.395547] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c682a475-78ce-42f9-bda6-a0724c0aa1af {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.447416] env[62204]: DEBUG nova.compute.manager [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Received event network-vif-plugged-7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 898.447416] env[62204]: DEBUG oslo_concurrency.lockutils [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] Acquiring lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.447416] env[62204]: DEBUG oslo_concurrency.lockutils [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.447698] env[62204]: DEBUG oslo_concurrency.lockutils [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.447976] env[62204]: DEBUG nova.compute.manager [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] No waiting events found dispatching network-vif-plugged-7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 898.448260] env[62204]: WARNING nova.compute.manager [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Received unexpected event network-vif-plugged-7394819f-3d04-4685-a087-5a61976b658a for instance with vm_state building and task_state spawning. 
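The oslo_concurrency.lockutils records above all follow one fixed pattern: a named lock is requested, the time spent waiting is reported when it is acquired ("waited 0.000s"), and the time it was held is reported when it is released ("held 0.000s"). Below is a minimal, stdlib-only sketch of that instrumentation pattern for reference; it does not use oslo.concurrency itself, and the lock name in the usage comment is simply the instance-events lock name taken from the records above.

import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)
_LOCKS = {}                      # name -> threading.Lock, created on first use
_LOCKS_GUARD = threading.Lock()  # protects the _LOCKS registry itself

@contextmanager
def timed_lock(name):
    """Acquire a named lock and log waited/held durations, mirroring the
    'acquired ... waited Xs' / 'released ... held Ys' records above."""
    with _LOCKS_GUARD:
        lock = _LOCKS.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    LOG.debug('Lock "%s" acquired :: waited %.3fs', name, acquired - start)
    try:
        yield
    finally:
        lock.release()
        LOG.debug('Lock "%s" released :: held %.3fs',
                  name, time.monotonic() - acquired)

# Usage (lock name taken from the records above; the body is illustrative):
# with timed_lock("d97d792d-614f-42e3-8516-6c0a7cf15ad5-events"):
#     pass  # pop/dispatch the instance event while the lock is held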
[ 898.448502] env[62204]: DEBUG nova.compute.manager [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Received event network-changed-7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 898.448704] env[62204]: DEBUG nova.compute.manager [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Refreshing instance network info cache due to event network-changed-7394819f-3d04-4685-a087-5a61976b658a. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 898.448933] env[62204]: DEBUG oslo_concurrency.lockutils [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] Acquiring lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.491576] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 898.491924] env[62204]: DEBUG nova.compute.manager [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Instance network_info: |[{"id": "7394819f-3d04-4685-a087-5a61976b658a", "address": "fa:16:3e:96:f4:0f", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7394819f-3d", "ovs_interfaceid": "7394819f-3d04-4685-a087-5a61976b658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 898.492285] env[62204]: DEBUG oslo_concurrency.lockutils [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] Acquired lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.492478] env[62204]: DEBUG nova.network.neutron [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Refreshing 
network info cache for port 7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 898.493889] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:f4:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7394819f-3d04-4685-a087-5a61976b658a', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.503037] env[62204]: DEBUG oslo.service.loopingcall [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.504876] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 898.505388] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4113eb6e-6c62-489a-a48d-83a2d6068878 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.527695] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.527695] env[62204]: value = "task-1200013" [ 898.527695] env[62204]: _type = "Task" [ 898.527695] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.537279] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200013, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.750827] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.597s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.753018] env[62204]: DEBUG nova.compute.manager [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 898.755356] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.122s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.756347] env[62204]: DEBUG nova.objects.instance [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'pci_requests' on Instance uuid 2727dc46-98ed-435d-89ef-41bc20cda776 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 898.889768] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200012, 'name': Rename_Task, 'duration_secs': 0.217538} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.890330] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 898.890521] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c402f373-6077-4d86-81f6-45ba6a845999 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.898620] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 898.898620] env[62204]: value = "task-1200014" [ 898.898620] env[62204]: _type = "Task" [ 898.898620] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.912116] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.913072] env[62204]: DEBUG oslo_concurrency.lockutils [None req-dce47677-bd53-4585-8899-7b0558a8dc25 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.340s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.038184] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200013, 'name': CreateVM_Task, 'duration_secs': 0.460023} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.038526] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 899.039419] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.039629] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.040177] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 899.040582] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89a2f6e8-4a93-47ee-b185-3643bc3981ee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.046826] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 899.046826] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520c6482-209e-9424-0cdd-47f684316779" [ 899.046826] env[62204]: _type = "Task" [ 899.046826] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.055595] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520c6482-209e-9424-0cdd-47f684316779, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.233873] env[62204]: DEBUG nova.network.neutron [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updated VIF entry in instance network info cache for port 7394819f-3d04-4685-a087-5a61976b658a. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 899.234388] env[62204]: DEBUG nova.network.neutron [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updating instance_info_cache with network_info: [{"id": "7394819f-3d04-4685-a087-5a61976b658a", "address": "fa:16:3e:96:f4:0f", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7394819f-3d", "ovs_interfaceid": "7394819f-3d04-4685-a087-5a61976b658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.259653] env[62204]: DEBUG nova.compute.utils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 899.261084] env[62204]: DEBUG nova.compute.manager [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 899.261259] env[62204]: DEBUG nova.network.neutron [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 899.265100] env[62204]: DEBUG nova.objects.instance [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'numa_topology' on Instance uuid 2727dc46-98ed-435d-89ef-41bc20cda776 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.309421] env[62204]: DEBUG nova.policy [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '478b22c814424575af79a8af808398a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81dc15a8604e4900845b79c75cc5ef16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 899.409388] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200014, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.557677] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520c6482-209e-9424-0cdd-47f684316779, 'name': SearchDatastore_Task, 'duration_secs': 0.013581} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.558017] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.558274] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.558529] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 899.558676] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.558858] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.559157] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b93c45c1-30cc-4e92-94a5-56d5904ea9a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.568759] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.568962] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.569882] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47b7914e-67b3-4879-bb5f-892570e3a552 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.575513] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 899.575513] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f972e9-607d-d802-e538-16041337c064" [ 899.575513] env[62204]: _type = "Task" [ 899.575513] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.583753] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f972e9-607d-d802-e538-16041337c064, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.630586] env[62204]: DEBUG nova.network.neutron [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Successfully created port: 55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 899.737875] env[62204]: DEBUG oslo_concurrency.lockutils [req-ad0f507b-1373-4360-a95b-a718ce588134 req-42d0b0fb-3389-47cc-9c42-cf856586d0a6 service nova] Releasing lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.768038] env[62204]: DEBUG nova.compute.manager [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 899.774696] env[62204]: INFO nova.compute.claims [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.909771] env[62204]: DEBUG oslo_vmware.api [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200014, 'name': PowerOnVM_Task, 'duration_secs': 0.635317} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.910238] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 899.910329] env[62204]: INFO nova.compute.manager [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Took 8.52 seconds to spawn the instance on the hypervisor. [ 899.910467] env[62204]: DEBUG nova.compute.manager [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 899.911287] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19483bc6-fc69-47bd-97f5-67ef468c7bf8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.086378] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f972e9-607d-d802-e538-16041337c064, 'name': SearchDatastore_Task, 'duration_secs': 0.014752} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.087213] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9058d10-c3b1-419f-968a-f5f836e504f8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.092895] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 900.092895] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5236626d-faa4-b9a4-a475-ca01249cdfd2" [ 900.092895] env[62204]: _type = "Task" [ 900.092895] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.101053] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5236626d-faa4-b9a4-a475-ca01249cdfd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.428765] env[62204]: INFO nova.compute.manager [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Took 47.01 seconds to build instance. 
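Every VMware operation in this trace (CreateVM_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task, ...) is driven by the same loop that produces the "Waiting for the task", "progress is N%." and "completed successfully." records above: submit the task, then poll it until it finishes. The sketch below reproduces only that polling shape with the standard library; fetch_progress is a stand-in callback and the values in the usage comment are illustrative, not the oslo.vmware API.

import logging
import time

LOG = logging.getLogger(__name__)

def wait_for_task(task_id, fetch_progress, interval=0.5, timeout=300.0):
    """Poll a task until it finishes, logging progress in the same style as
    the '... progress is N%.' / '... completed successfully.' records above.

    fetch_progress(task_id) -> int percentage in [0, 100]; an exception from
    it is treated as a task error by the caller.
    """
    deadline = time.monotonic() + timeout
    while True:
        progress = fetch_progress(task_id)
        if progress >= 100:
            LOG.debug("Task %s completed successfully.", task_id)
            return
        LOG.debug("Task %s progress is %d%%.", task_id, progress)
        if time.monotonic() > deadline:
            raise TimeoutError(f"task {task_id} did not finish in {timeout}s")
        time.sleep(interval)

# Usage with a fake progress source (illustrative only):
# steps = iter([0, 5, 89, 100])
# wait_for_task("task-1200014", lambda _id: next(steps), interval=0)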
[ 900.603335] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5236626d-faa4-b9a4-a475-ca01249cdfd2, 'name': SearchDatastore_Task, 'duration_secs': 0.012063} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.603646] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 900.603917] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d97d792d-614f-42e3-8516-6c0a7cf15ad5/d97d792d-614f-42e3-8516-6c0a7cf15ad5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 900.604231] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab9834b1-a725-4021-a01c-82b3d5d4be1c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.611675] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 900.611675] env[62204]: value = "task-1200015" [ 900.611675] env[62204]: _type = "Task" [ 900.611675] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.620318] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200015, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.780973] env[62204]: DEBUG nova.compute.manager [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 900.931283] env[62204]: DEBUG oslo_concurrency.lockutils [None req-00700f18-ec96-4602-88c4-80c1d12f1950 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.315s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.122585] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200015, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.190098] env[62204]: DEBUG nova.network.neutron [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Successfully updated port: 55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 901.240158] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8497ace4-263b-477b-ae83-2c852dfd4949 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.249772] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfb2cb1-59ed-423d-a113-98e4e927a8de {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.282912] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77554c38-4df8-48ce-baa6-d03531c366fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.291196] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0c8695-5b6c-4fab-a4d3-cf0b460d7164 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.306616] env[62204]: DEBUG nova.compute.provider_tree [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.624711] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200015, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.711387} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.625065] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d97d792d-614f-42e3-8516-6c0a7cf15ad5/d97d792d-614f-42e3-8516-6c0a7cf15ad5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 901.625320] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 901.625692] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb01f305-ce89-4936-9bd8-c7e23259989d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.633431] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 901.633431] env[62204]: value = "task-1200016" [ 901.633431] env[62204]: _type = "Task" [ 901.633431] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.642075] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200016, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.697239] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.697338] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.697529] env[62204]: DEBUG nova.network.neutron [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 901.810616] env[62204]: DEBUG nova.scheduler.client.report [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 902.145223] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200016, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076056} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.145706] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 902.146653] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc45c5f-fc0a-4640-9a3c-b05333d92cbd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.170137] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] d97d792d-614f-42e3-8516-6c0a7cf15ad5/d97d792d-614f-42e3-8516-6c0a7cf15ad5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 902.170513] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d0fd490-a18d-48bb-8882-d99186196573 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.191459] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 902.191459] env[62204]: value = "task-1200017" [ 902.191459] env[62204]: _type = "Task" [ 902.191459] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.201866] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200017, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.232454] env[62204]: DEBUG nova.network.neutron [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 902.317846] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.562s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.320475] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.528s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.322465] env[62204]: INFO nova.compute.claims [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.377787] env[62204]: INFO nova.network.neutron [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating port 52d592a0-434a-4f17-8db6-39bf5d505429 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 902.397419] env[62204]: DEBUG nova.network.neutron [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.702758] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200017, 'name': ReconfigVM_Task, 'duration_secs': 0.429798} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.703194] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfigured VM instance instance-00000055 to attach disk [datastore1] d97d792d-614f-42e3-8516-6c0a7cf15ad5/d97d792d-614f-42e3-8516-6c0a7cf15ad5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.703910] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01196c2a-9248-4663-b231-9d801ec758e7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.711063] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 902.711063] env[62204]: value = "task-1200018" [ 902.711063] env[62204]: _type = "Task" [ 902.711063] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.720227] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200018, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.900415] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.903019] env[62204]: DEBUG nova.compute.manager [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Instance network_info: |[{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 903.222248] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200018, 'name': Rename_Task, 'duration_secs': 0.306225} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.222626] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.222878] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20c49721-0401-4986-b4fd-3fda635889bc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.229582] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 903.229582] env[62204]: value = "task-1200019" [ 903.229582] env[62204]: _type = "Task" [ 903.229582] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.237996] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200019, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.668224] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679b15f8-4a71-4abe-af23-46ea2e2c11e4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.677724] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d9c0e2-6e03-43f2-9238-9297bb5e67c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.709634] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daadb55b-2cbf-49e6-ae2b-cf84b9b5f14d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.717172] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acc1111-34c7-4e0a-9934-aea461b6ff39 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.731218] env[62204]: DEBUG nova.compute.provider_tree [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.740779] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200019, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.929633] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.929985] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.930148] env[62204]: DEBUG nova.network.neutron [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 904.236858] env[62204]: DEBUG nova.scheduler.client.report [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 904.247222] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200019, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.693598] env[62204]: DEBUG nova.network.neutron [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating instance_info_cache with network_info: [{"id": "52d592a0-434a-4f17-8db6-39bf5d505429", "address": "fa:16:3e:98:f8:77", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d592a0-43", "ovs_interfaceid": "52d592a0-434a-4f17-8db6-39bf5d505429", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.742843] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 904.743443] env[62204]: DEBUG nova.compute.manager [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 904.746567] env[62204]: DEBUG oslo_vmware.api [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200019, 'name': PowerOnVM_Task, 'duration_secs': 1.208176} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.746922] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.638s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 904.747155] env[62204]: DEBUG nova.objects.instance [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62204) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 904.750000] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.750000] env[62204]: INFO nova.compute.manager [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Took 8.78 seconds to spawn the instance on the hypervisor. [ 904.750171] env[62204]: DEBUG nova.compute.manager [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 904.751056] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c24e7a4-62ac-4cee-a6cb-1969ee083419 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.143062] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 905.143062] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor limits 0:0:0 
{{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 905.143252] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 905.143406] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 905.143714] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 905.143764] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 905.144516] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 905.144516] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 905.144516] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 905.144788] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 905.144971] env[62204]: DEBUG nova.virt.hardware [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 905.147408] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1109839-5625-4c48-a64f-9bcf829e4def {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.157148] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c6c081f7-fae3-440c-b996-2722390b1089 
tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5bc50-6b92-034e-9890-8f84bedbac69/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 905.158601] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8432b9f3-1a82-4855-b1e0-bc79f9c486c7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.162933] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa02a3f-0e19-49fd-b78a-a33030274821 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.177724] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:8b:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55c5fd12-e601-44a8-ab4f-2fb4f263333e', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.183972] env[62204]: DEBUG oslo.service.loopingcall [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 905.186026] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.186178] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5bc50-6b92-034e-9890-8f84bedbac69/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 905.186237] env[62204]: ERROR oslo_vmware.rw_handles [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5bc50-6b92-034e-9890-8f84bedbac69/disk-0.vmdk due to incomplete transfer. 
[ 905.186475] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83418379-48a7-415f-b963-6424f29c9a39 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.200933] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-202ef326-5878-422c-bd71-573c1264563e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.203039] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.212676] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.212676] env[62204]: value = "task-1200020" [ 905.212676] env[62204]: _type = "Task" [ 905.212676] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.213960] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d5bc50-6b92-034e-9890-8f84bedbac69/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 905.214204] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Uploaded image a9e952fa-67fa-4a49-a75c-594f33aa3496 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 905.216789] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 905.220069] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-424aa166-91dd-4d63-bd7d-ec151055bb65 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.228951] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200020, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.232195] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='4bc7a5e1619acb4706f40b609f60205a',container_format='bare',created_at=2024-10-08T23:41:06Z,direct_url=,disk_format='vmdk',id=c2930f97-8cc3-4e0b-b082-ac3975f12ee6,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1052961107-shelved',owner='8108a8f6b5e04832aab188333bad1e0e',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2024-10-08T23:41:22Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 905.232449] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 905.232623] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 905.232807] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 905.232956] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 905.233263] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 905.233331] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 905.233526] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 905.233677] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 905.233834] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 905.234057] env[62204]: DEBUG nova.virt.hardware [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 905.234416] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 905.234416] env[62204]: value = "task-1200021" [ 905.234416] env[62204]: _type = "Task" [ 905.234416] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.235134] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f698ee-54ed-424b-a626-8d86535c1a0d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.248343] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a62d95d-89c5-406b-8a89-657222f81e09 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.253398] env[62204]: DEBUG nova.compute.utils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.255063] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200021, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.257968] env[62204]: DEBUG nova.compute.manager [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 905.258260] env[62204]: DEBUG nova.network.neutron [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 905.280026] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:f8:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52d592a0-434a-4f17-8db6-39bf5d505429', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.286271] env[62204]: DEBUG oslo.service.loopingcall [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 905.288700] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.288935] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c67617b5-fb46-4446-9153-7d9b8e8b95a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.304108] env[62204]: INFO nova.compute.manager [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Took 48.75 seconds to build instance. [ 905.310543] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.310543] env[62204]: value = "task-1200022" [ 905.310543] env[62204]: _type = "Task" [ 905.310543] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.320573] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200022, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.389410] env[62204]: DEBUG nova.policy [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f57a0e000a79440489a0009f1b2390e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cc2d3674b2a4fa3806dc0286481368e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 905.728083] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200020, 'name': CreateVM_Task, 'duration_secs': 0.493016} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.728083] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.728083] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.728083] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.728083] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 905.729761] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dee29ef-a8c2-42cd-a200-3b35ee0512cd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.735614] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 905.735614] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dabf15-5a82-5b15-67e8-a82532bc044c" [ 905.735614] env[62204]: _type = "Task" [ 905.735614] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.752438] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200021, 'name': Destroy_Task} progress is 33%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.752438] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dabf15-5a82-5b15-67e8-a82532bc044c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.762907] env[62204]: DEBUG nova.compute.manager [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 905.766733] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c30a9a28-4b1f-462a-9cf2-1da13b7f214d tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.768379] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.751s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 905.768753] env[62204]: DEBUG nova.objects.instance [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lazy-loading 'resources' on Instance uuid 0a720922-60ea-4b31-ba56-cdcbba1ab629 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 905.806957] env[62204]: DEBUG oslo_concurrency.lockutils [None req-63fef283-90ab-4602-a2f6-0420ba69eac9 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.261s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.824344] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200022, 'name': CreateVM_Task, 'duration_secs': 0.503562} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.824344] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 905.824940] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.825761] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.827078] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 905.828660] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6612f76-899f-4e17-9e51-fc112c1d9b66 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.836931] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 905.836931] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e29bb5-8a17-7d41-36c8-3719b9a957cd" [ 905.836931] env[62204]: _type = "Task" [ 905.836931] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.845417] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e29bb5-8a17-7d41-36c8-3719b9a957cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.921288] env[62204]: DEBUG oslo_vmware.rw_handles [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5269da5f-888c-b436-d537-44b8b5fbb765/disk-0.vmdk. 
{{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 905.922353] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f776ed-7086-43c9-87c8-0855dd3742ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.929034] env[62204]: DEBUG oslo_vmware.rw_handles [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5269da5f-888c-b436-d537-44b8b5fbb765/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 905.929034] env[62204]: ERROR oslo_vmware.rw_handles [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5269da5f-888c-b436-d537-44b8b5fbb765/disk-0.vmdk due to incomplete transfer. [ 905.929281] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fd21f8af-8956-4cbe-a33b-2983d6c385a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.937222] env[62204]: DEBUG oslo_vmware.rw_handles [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5269da5f-888c-b436-d537-44b8b5fbb765/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 905.937783] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Uploaded image 8bb268b3-36ef-40e8-9a00-091cb3770103 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 905.939462] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 905.939810] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-62918f3c-217c-40a2-94db-8a09b57f3396 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.946627] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 905.946627] env[62204]: value = "task-1200023" [ 905.946627] env[62204]: _type = "Task" [ 905.946627] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.956510] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200023, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.971982] env[62204]: DEBUG nova.network.neutron [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Successfully created port: 4629653e-3fb2-4a37-b37d-0435f854c4bf {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 906.251033] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200021, 'name': Destroy_Task, 'duration_secs': 0.678237} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.255314] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Destroyed the VM [ 906.255625] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 906.256203] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dabf15-5a82-5b15-67e8-a82532bc044c, 'name': SearchDatastore_Task, 'duration_secs': 0.018341} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.256407] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-01d052b5-6b9c-482e-b718-d6fdb8cd8f0c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.258323] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.258561] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.258798] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.258946] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.259155] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.259425] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0d23758-4422-4294-a648-fb4fb4d63c58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.265922] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 906.265922] env[62204]: value = "task-1200024" [ 906.265922] env[62204]: _type = "Task" [ 906.265922] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.276827] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.277016] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.280769] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a94af1da-378c-4c60-b131-7c39c9ed5051 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.286750] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200024, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.290601] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 906.290601] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202a7f2-bffb-bdf1-b08a-f9a716a7e751" [ 906.290601] env[62204]: _type = "Task" [ 906.290601] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.298690] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202a7f2-bffb-bdf1-b08a-f9a716a7e751, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.351495] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.351805] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Processing image c2930f97-8cc3-4e0b-b082-ac3975f12ee6 {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.352056] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6/c2930f97-8cc3-4e0b-b082-ac3975f12ee6.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.352215] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6/c2930f97-8cc3-4e0b-b082-ac3975f12ee6.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.352400] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.352963] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d395c418-975f-4143-b775-c064c89785ca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.361618] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.361869] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.365835] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5aaef22-438e-4613-bde4-bdac7dc0083a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.371304] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 906.371304] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5263d105-da96-0dc9-ee2d-8f9d37de134b" [ 906.371304] env[62204]: _type = "Task" [ 906.371304] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.380227] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5263d105-da96-0dc9-ee2d-8f9d37de134b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.399709] env[62204]: DEBUG nova.compute.manager [req-54ab84c4-677e-4e30-9369-61e45c46e8b4 req-2f391ad7-5577-429b-a229-9d574b3f895d service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received event network-vif-plugged-55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.399933] env[62204]: DEBUG oslo_concurrency.lockutils [req-54ab84c4-677e-4e30-9369-61e45c46e8b4 req-2f391ad7-5577-429b-a229-9d574b3f895d service nova] Acquiring lock "274285e5-fc23-48b4-b0d6-5a67bc764d78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.400164] env[62204]: DEBUG oslo_concurrency.lockutils [req-54ab84c4-677e-4e30-9369-61e45c46e8b4 req-2f391ad7-5577-429b-a229-9d574b3f895d service nova] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.400342] env[62204]: DEBUG oslo_concurrency.lockutils [req-54ab84c4-677e-4e30-9369-61e45c46e8b4 req-2f391ad7-5577-429b-a229-9d574b3f895d service nova] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.400520] env[62204]: DEBUG nova.compute.manager [req-54ab84c4-677e-4e30-9369-61e45c46e8b4 req-2f391ad7-5577-429b-a229-9d574b3f895d service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] No waiting events found dispatching network-vif-plugged-55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 906.400872] env[62204]: WARNING nova.compute.manager [req-54ab84c4-677e-4e30-9369-61e45c46e8b4 req-2f391ad7-5577-429b-a229-9d574b3f895d service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received unexpected event 
network-vif-plugged-55c5fd12-e601-44a8-ab4f-2fb4f263333e for instance with vm_state building and task_state spawning. [ 906.423603] env[62204]: DEBUG nova.compute.manager [req-603fe26a-4e08-4557-af96-a518b51437af req-bce71939-dbfe-44e6-b720-62faddf9058e service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received event network-vif-plugged-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.425952] env[62204]: DEBUG oslo_concurrency.lockutils [req-603fe26a-4e08-4557-af96-a518b51437af req-bce71939-dbfe-44e6-b720-62faddf9058e service nova] Acquiring lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.426552] env[62204]: DEBUG oslo_concurrency.lockutils [req-603fe26a-4e08-4557-af96-a518b51437af req-bce71939-dbfe-44e6-b720-62faddf9058e service nova] Lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.426552] env[62204]: DEBUG oslo_concurrency.lockutils [req-603fe26a-4e08-4557-af96-a518b51437af req-bce71939-dbfe-44e6-b720-62faddf9058e service nova] Lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.426696] env[62204]: DEBUG nova.compute.manager [req-603fe26a-4e08-4557-af96-a518b51437af req-bce71939-dbfe-44e6-b720-62faddf9058e service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] No waiting events found dispatching network-vif-plugged-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 906.426896] env[62204]: WARNING nova.compute.manager [req-603fe26a-4e08-4557-af96-a518b51437af req-bce71939-dbfe-44e6-b720-62faddf9058e service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received unexpected event network-vif-plugged-52d592a0-434a-4f17-8db6-39bf5d505429 for instance with vm_state shelved_offloaded and task_state spawning. [ 906.457043] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200023, 'name': Destroy_Task, 'duration_secs': 0.345029} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.462229] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Destroyed the VM [ 906.462229] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 906.462229] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c5f1d232-43e3-4009-b7f1-1e1038dbf706 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.467983] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 906.467983] env[62204]: value = "task-1200025" [ 906.467983] env[62204]: _type = "Task" [ 906.467983] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.479986] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200025, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.685053] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4263f9-bcac-4119-b231-51351bb794a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.693295] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac0f604-33c3-441f-8101-d89a019dfd1f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.724861] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c77b43-ab72-4f4d-88b2-87be31aae120 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.732965] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34555c93-e20d-4662-9434-20342b7598d7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.748308] env[62204]: DEBUG nova.compute.provider_tree [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 906.777365] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200024, 'name': 
RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.779591] env[62204]: DEBUG nova.compute.manager [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 906.802742] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202a7f2-bffb-bdf1-b08a-f9a716a7e751, 'name': SearchDatastore_Task, 'duration_secs': 0.028749} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.804071] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03b3a565-e200-4be8-822e-27c9868735c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.809869] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.810311] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.810497] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.810693] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.810841] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.811566] env[62204]: DEBUG nova.virt.hardware [None 
req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.811805] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 906.812031] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.812175] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.812352] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.812538] env[62204]: DEBUG nova.virt.hardware [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.813443] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdac659-6c50-4ce6-85cd-32e505cef2e0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.819509] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 906.819509] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a9d675-9a0c-bd67-9f53-00dd88857545" [ 906.819509] env[62204]: _type = "Task" [ 906.819509] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.826874] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4534c0c2-0f17-4006-8400-b8354d0d4847 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.837579] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a9d675-9a0c-bd67-9f53-00dd88857545, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.881996] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Preparing fetch location {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 906.882370] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Fetch image to [datastore1] OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137/OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137.vmdk {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 906.882570] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Downloading stream optimized image c2930f97-8cc3-4e0b-b082-ac3975f12ee6 to [datastore1] OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137/OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137.vmdk on the data store datastore1 as vApp {{(pid=62204) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 906.883431] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Downloading image file data c2930f97-8cc3-4e0b-b082-ac3975f12ee6 to the ESX as VM named 'OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137' {{(pid=62204) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 906.974975] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 906.974975] env[62204]: value = "resgroup-9" [ 906.974975] env[62204]: _type = "ResourcePool" [ 906.974975] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 906.979024] env[62204]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-aee49673-43b7-4191-9203-31b0f7726402 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.001320] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200025, 'name': RemoveSnapshot_Task} progress is 16%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.008172] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lease: (returnval){ [ 907.008172] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529604ec-7fb0-de2a-e0a9-98373d1ce701" [ 907.008172] env[62204]: _type = "HttpNfcLease" [ 907.008172] env[62204]: } obtained for vApp import into resource pool (val){ [ 907.008172] env[62204]: value = "resgroup-9" [ 907.008172] env[62204]: _type = "ResourcePool" [ 907.008172] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 907.008741] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the lease: (returnval){ [ 907.008741] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529604ec-7fb0-de2a-e0a9-98373d1ce701" [ 907.008741] env[62204]: _type = "HttpNfcLease" [ 907.008741] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 907.017194] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 907.017194] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529604ec-7fb0-de2a-e0a9-98373d1ce701" [ 907.017194] env[62204]: _type = "HttpNfcLease" [ 907.017194] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 907.150925] env[62204]: INFO nova.compute.manager [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Rescuing [ 907.151705] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.151705] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.151878] env[62204]: DEBUG nova.network.neutron [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 907.209769] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "4dc4546f-85e6-4259-9ccd-a7396669eace" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.210100] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.252164] env[62204]: DEBUG nova.scheduler.client.report [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 907.279243] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200024, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.332804] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a9d675-9a0c-bd67-9f53-00dd88857545, 'name': SearchDatastore_Task, 'duration_secs': 0.017667} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.333127] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.333597] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 274285e5-fc23-48b4-b0d6-5a67bc764d78/274285e5-fc23-48b4-b0d6-5a67bc764d78.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.333966] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1234500-717f-4253-b466-7dcfa2a62293 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.341020] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 907.341020] env[62204]: value = "task-1200027" [ 907.341020] env[62204]: _type = "Task" [ 907.341020] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.357809] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.481319] env[62204]: DEBUG oslo_vmware.api [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200025, 'name': RemoveSnapshot_Task, 'duration_secs': 0.568669} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.481686] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 907.481946] env[62204]: INFO nova.compute.manager [None req-154d42c8-2f7f-47ff-ae97-b285f54bc681 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Took 15.54 seconds to snapshot the instance on the hypervisor. 
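Editor's note: the entries above trace the driver's task lifecycle (Destroy_Task, RemoveSnapshot_Task, SearchDatastore_Task, CopyVirtualDisk_Task): a Task handle is returned, `wait_for_task` polls it (the repeated "progress is N%" lines from `_poll_task`), and the entry finishes with "completed successfully". The following is a minimal plain-Python sketch of that poll-until-done pattern for orientation only; it is not the oslo.vmware implementation, and `TaskInfo`/`fetch_task_info` are hypothetical stand-ins for the real vSphere TaskInfo lookup.

```python
# Minimal sketch (plain Python, NOT the oslo.vmware implementation) of the
# poll loop behind the wait_for_task/_poll_task lines in this log: a task
# handle is polled on an interval, progress is reported, and the loop exits
# when the task succeeds or raises when it errors out.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:                 # hypothetical stand-in for vSphere TaskInfo
    state: str                  # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0           # percent complete, as reported by vCenter
    error: str = ""


def wait_for_task(fetch_task_info, poll_interval=0.5):
    """Poll a task until it finishes, mirroring the log's progress lines."""
    while True:
        info = fetch_task_info()
        if info.state in ("queued", "running"):
            print(f"Task progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            print("Task completed successfully.")
            return info
        raise RuntimeError(f"Task failed: {info.error}")


# Usage example: a fake task that reports progress twice, then succeeds.
if __name__ == "__main__":
    states = iter([TaskInfo("running", 0),
                   TaskInfo("running", 80),
                   TaskInfo("success", 100)])
    wait_for_task(lambda: next(states), poll_interval=0)
```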
[ 907.520715] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 907.520715] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529604ec-7fb0-de2a-e0a9-98373d1ce701" [ 907.520715] env[62204]: _type = "HttpNfcLease" [ 907.520715] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 907.706254] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.706648] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.714293] env[62204]: DEBUG nova.compute.utils [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 907.760319] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.992s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 907.763041] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.651s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.763319] env[62204]: DEBUG nova.objects.instance [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lazy-loading 'resources' on Instance uuid 51c9e353-f2cf-41b4-b37e-1cfd5dca0518 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.784167] env[62204]: DEBUG oslo_vmware.api [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200024, 'name': RemoveSnapshot_Task, 'duration_secs': 1.113062} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.784800] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 907.785041] env[62204]: INFO nova.compute.manager [None req-c6c081f7-fae3-440c-b996-2722390b1089 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Took 15.12 seconds to snapshot the instance on the hypervisor. [ 907.791679] env[62204]: INFO nova.scheduler.client.report [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Deleted allocations for instance 0a720922-60ea-4b31-ba56-cdcbba1ab629 [ 907.853960] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200027, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.918562] env[62204]: DEBUG nova.network.neutron [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Updating instance_info_cache with network_info: [{"id": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "address": "fa:16:3e:6d:c4:e1", "network": {"id": "9b2eaa21-fa75-417b-8af1-754e25729d68", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1173392259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ec87b51786754b05aa75abb818bdbc15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07c8c254-5b", "ovs_interfaceid": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.932982] env[62204]: DEBUG nova.network.neutron [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Successfully updated port: 4629653e-3fb2-4a37-b37d-0435f854c4bf {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 908.018358] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 908.018358] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529604ec-7fb0-de2a-e0a9-98373d1ce701" [ 908.018358] env[62204]: _type = "HttpNfcLease" [ 908.018358] 
env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 908.018759] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 908.018759] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529604ec-7fb0-de2a-e0a9-98373d1ce701" [ 908.018759] env[62204]: _type = "HttpNfcLease" [ 908.018759] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 908.019760] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d2acad-6eea-4955-92cb-7710d3838e62 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.026893] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5223980e-0b2a-789e-8d58-0f9b379dc0aa/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 908.027083] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5223980e-0b2a-789e-8d58-0f9b379dc0aa/disk-0.vmdk. {{(pid=62204) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 908.092184] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-aa237761-ef81-49a6-9984-7fc3badda301 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.211791] env[62204]: DEBUG nova.compute.manager [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 908.217846] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.301862] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c5230780-042f-4eec-87c4-d7a71001fefc tempest-ServerShowV257Test-630479836 tempest-ServerShowV257Test-630479836-project-member] Lock "0a720922-60ea-4b31-ba56-cdcbba1ab629" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.084s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.352380] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541043} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.352696] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 274285e5-fc23-48b4-b0d6-5a67bc764d78/274285e5-fc23-48b4-b0d6-5a67bc764d78.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 908.352922] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 908.353200] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2f84d15-ee6f-4102-94f5-ea2c80720f16 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.360044] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 908.360044] env[62204]: value = "task-1200028" [ 908.360044] env[62204]: _type = "Task" [ 908.360044] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.370412] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200028, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.421342] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.436024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "refresh_cache-7a0e579d-38e7-4f04-bf4d-1076dfc3b374" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.436024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "refresh_cache-7a0e579d-38e7-4f04-bf4d-1076dfc3b374" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.436024] env[62204]: DEBUG nova.network.neutron [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 908.639655] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa8864b-fdfd-4ece-b73b-083486af141e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.647778] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d131424-109d-429f-927b-4702bc87635b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.678969] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba45356-7000-45d1-8a30-fbc5aac77452 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.687410] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed557257-3897-4ba1-ac3f-7740379bcb04 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.703666] env[62204]: DEBUG nova.compute.provider_tree [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.731786] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.767881] env[62204]: DEBUG nova.compute.manager [req-4be3e6a8-cd4c-4059-af97-5da2804529fa 
req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 908.768378] env[62204]: DEBUG nova.compute.manager [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing instance network info cache due to event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 908.768437] env[62204]: DEBUG oslo_concurrency.lockutils [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] Acquiring lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.768570] env[62204]: DEBUG oslo_concurrency.lockutils [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] Acquired lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.768970] env[62204]: DEBUG nova.network.neutron [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 908.796801] env[62204]: DEBUG nova.compute.manager [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received event network-changed-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 908.796801] env[62204]: DEBUG nova.compute.manager [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Refreshing instance network info cache due to event network-changed-52d592a0-434a-4f17-8db6-39bf5d505429. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 908.796801] env[62204]: DEBUG oslo_concurrency.lockutils [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] Acquiring lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.796801] env[62204]: DEBUG oslo_concurrency.lockutils [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] Acquired lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.796801] env[62204]: DEBUG nova.network.neutron [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Refreshing network info cache for port 52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 908.885026] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200028, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069678} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.887712] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 908.889504] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6f39cb-a86e-47c8-859f-b4ec1521ae07 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.914127] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 274285e5-fc23-48b4-b0d6-5a67bc764d78/274285e5-fc23-48b4-b0d6-5a67bc764d78.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.919194] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a378d0b3-1d24-4ddd-8740-b55fb898085d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.948924] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 908.948924] env[62204]: value = "task-1200029" [ 908.948924] env[62204]: _type = "Task" [ 908.948924] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.962069] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200029, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.984512] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.984978] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe8c775a-e863-4309-a487-587e37924688 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.991659] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 908.991659] env[62204]: value = "task-1200030" [ 908.991659] env[62204]: _type = "Task" [ 908.991659] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.002426] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200030, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.009549] env[62204]: DEBUG nova.network.neutron [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 909.207508] env[62204]: DEBUG nova.scheduler.client.report [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 909.298339] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "4dc4546f-85e6-4259-9ccd-a7396669eace" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.298806] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.299153] env[62204]: INFO nova.compute.manager [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Attaching volume c03fb060-d8ba-44fe-b529-4e52b7dc7047 to /dev/sdb [ 909.352130] env[62204]: DEBUG nova.network.neutron [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Updating instance_info_cache with network_info: [{"id": "4629653e-3fb2-4a37-b37d-0435f854c4bf", "address": "fa:16:3e:26:4a:95", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4629653e-3f", "ovs_interfaceid": "4629653e-3fb2-4a37-b37d-0435f854c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.360614] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc042ead-652e-4923-8164-3f552a7a21ca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.375420] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616bb47d-6621-4469-9e0f-1e1848a923c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.380124] env[62204]: DEBUG oslo_concurrency.lockutils [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.380124] env[62204]: DEBUG oslo_concurrency.lockutils [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.380536] env[62204]: DEBUG oslo_concurrency.lockutils [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.380761] env[62204]: DEBUG oslo_concurrency.lockutils [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.380938] env[62204]: DEBUG oslo_concurrency.lockutils [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.383184] env[62204]: INFO nova.compute.manager [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Terminating instance [ 909.388292] env[62204]: DEBUG nova.compute.manager [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 909.388499] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.390896] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14296d70-5e21-4ee1-9da5-0ab4c8a97047 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.402247] env[62204]: DEBUG nova.virt.block_device [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Updating existing volume attachment record: 32173b21-21a3-4aff-9d40-6491e7dfbb2d {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 909.414023] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.414023] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9279db7-f3c3-4a98-90e3-a2760ff6a585 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.422163] env[62204]: DEBUG oslo_vmware.api [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 909.422163] env[62204]: value = "task-1200031" [ 909.422163] env[62204]: _type = "Task" [ 909.422163] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.434447] env[62204]: DEBUG oslo_vmware.api [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200031, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.464405] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200029, 'name': ReconfigVM_Task, 'duration_secs': 0.418517} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.472941] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 274285e5-fc23-48b4-b0d6-5a67bc764d78/274285e5-fc23-48b4-b0d6-5a67bc764d78.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 909.474101] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce6c31f7-e71d-4565-83d2-27df73050ddb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.482894] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 909.482894] env[62204]: value = "task-1200032" [ 909.482894] env[62204]: _type = "Task" [ 909.482894] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.493384] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200032, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.494870] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Completed reading data from the image iterator. {{(pid=62204) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 909.494870] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5223980e-0b2a-789e-8d58-0f9b379dc0aa/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 909.498808] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55446523-c915-4f3c-a8d2-db0cc8f0ec3f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.507752] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200030, 'name': PowerOffVM_Task, 'duration_secs': 0.308053} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.509849] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.510225] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5223980e-0b2a-789e-8d58-0f9b379dc0aa/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 909.510465] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5223980e-0b2a-789e-8d58-0f9b379dc0aa/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 909.511387] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8f4a0a-482b-47ff-abe7-6c142bc11c45 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.514512] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-4522ce81-6008-40a9-9d04-85bfc0423eef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.536480] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806a8b52-f886-422b-9f15-50c418fa01e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.564461] env[62204]: DEBUG nova.network.neutron [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updated VIF entry in instance network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 909.564842] env[62204]: DEBUG nova.network.neutron [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.577065] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.577065] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b343be14-35c0-452e-a683-1d004f803d61 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.583444] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 909.583444] env[62204]: value = "task-1200034" [ 909.583444] env[62204]: _type = "Task" [ 909.583444] env[62204]: } to complete. 
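The "Updating instance_info_cache with network_info: [...]" payload above is the serialized VIF list Nova caches for the instance (Nova deserializes it into nova.network.model objects). Below is a trimmed copy of that payload, keeping only the fields needed here, with the port ID, MAC and fixed IP taken from the log, plus an illustrative helper that flattens it into per-VIF summaries.

    # Flatten an instance_info_cache network_info payload; trimmed sample data.
    import json

    NETWORK_INFO_JSON = """
    [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e",
      "address": "fa:16:3e:a5:8b:3a",
      "type": "ovs",
      "devname": "tap55c5fd12-e6",
      "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a",
                  "bridge": "br-int",
                  "subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.8",
                                        "type": "fixed",
                                        "floating_ips": []}]}]}}]
    """

    def summarize_vifs(network_info):
        """Yield one summary dict per VIF in a network_info list."""
        for vif in network_info:
            fixed, floating = [], []
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    fixed.append(ip["address"])
                    floating.extend(f["address"] for f in ip.get("floating_ips", []))
            yield {"port_id": vif["id"], "mac": vif["address"],
                   "devname": vif.get("devname"), "fixed": fixed, "floating": floating}

    if __name__ == "__main__":
        for summary in summarize_vifs(json.loads(NETWORK_INFO_JSON)):
            print(summary)

The same walk applies to the cache entries refreshed later in this trace for ports 52d592a0-434a-4f17-8db6-39bf5d505429 (whose payload also carries a floating IP) and 4629653e-3fb2-4a37-b37d-0435f854c4bf.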
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.595415] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 909.595415] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 909.595415] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.595415] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.595415] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 909.595907] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-485e9966-bf55-4c37-8fc2-728f9f77e477 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.605898] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 909.606835] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 909.607314] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61e52ba2-f102-4ac5-9fe3-446d2f341e24 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.612160] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 909.612160] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e54d44-797e-6c63-378d-887057dd9051" [ 909.612160] env[62204]: _type = "Task" [ 909.612160] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.620296] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e54d44-797e-6c63-378d-887057dd9051, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.717927] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.955s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.720665] env[62204]: DEBUG oslo_concurrency.lockutils [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.291s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.721234] env[62204]: DEBUG nova.objects.instance [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lazy-loading 'resources' on Instance uuid 211ca0c1-cf05-4148-ad5c-46cbbd72278e {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 909.760173] env[62204]: INFO nova.scheduler.client.report [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Deleted allocations for instance 51c9e353-f2cf-41b4-b37e-1cfd5dca0518 [ 909.780128] env[62204]: DEBUG oslo_vmware.rw_handles [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5223980e-0b2a-789e-8d58-0f9b379dc0aa/disk-0.vmdk. 
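The sequence just above, taking a lock on "[datastore1] devstack-image-cache_base/<image uuid>/<image uuid>.vmdk", invoking FileManager.MakeDirectory, and then running a SearchDatastore_Task, is the image-cache check in _fetch_image_if_missing: serialize on the cached path, create the cache folder idempotently, probe the datastore, and only fetch the image on a miss. A simplified sketch of that flow; ds_mkdir, ds_exists and fetch_from_glance are hypothetical stand-ins for the vCenter and Glance calls, and oslo.concurrency is assumed to be installed.

    # Check-then-fetch image cache pattern (stubbed datastore/Glance helpers).
    from oslo_concurrency import lockutils

    CACHE_DIR = "[datastore1] devstack-image-cache_base"

    def ds_mkdir(path):
        # Stand-in for FileManager.MakeDirectory; safe to repeat.
        print(f"mkdir {path}")

    def ds_exists(path):
        # Stand-in for HostDatastoreBrowser.SearchDatastore_Task.
        return False

    def fetch_from_glance(image_id, dest):
        print(f"downloading image {image_id} to {dest}")

    def fetch_image_if_missing(image_id):
        cached_vmdk = f"{CACHE_DIR}/{image_id}/{image_id}.vmdk"
        # Same lock name shape as in the log above.
        with lockutils.lock(cached_vmdk):
            ds_mkdir(CACHE_DIR)
            if not ds_exists(cached_vmdk):
                fetch_from_glance(image_id, cached_vmdk)
        return cached_vmdk

    if __name__ == "__main__":
        print(fetch_image_if_missing("c0e4d3a1-f965-49e2-ab05-fbf425872dcc"))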
{{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 909.780128] env[62204]: INFO nova.virt.vmwareapi.images [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Downloaded image file data c2930f97-8cc3-4e0b-b082-ac3975f12ee6 [ 909.780989] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a024cadf-743b-4544-9308-9eb8c3677b0f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.807831] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb933c49-9e3c-45f6-ab1d-6b7b2ecc5080 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.816857] env[62204]: DEBUG nova.network.neutron [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updated VIF entry in instance network info cache for port 52d592a0-434a-4f17-8db6-39bf5d505429. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 909.816857] env[62204]: DEBUG nova.network.neutron [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating instance_info_cache with network_info: [{"id": "52d592a0-434a-4f17-8db6-39bf5d505429", "address": "fa:16:3e:98:f8:77", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.224", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52d592a0-43", "ovs_interfaceid": "52d592a0-434a-4f17-8db6-39bf5d505429", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.839946] env[62204]: INFO nova.virt.vmwareapi.images [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] The imported VM was unregistered [ 909.845145] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Caching image {{(pid=62204) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 909.845489] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating directory with path [datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6 {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 909.846285] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a58c267-1305-4b05-b6ee-5fe2cbb776d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.858753] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Created directory with path [datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6 {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 909.858965] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137/OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137.vmdk to [datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6/c2930f97-8cc3-4e0b-b082-ac3975f12ee6.vmdk. {{(pid=62204) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 909.859244] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-23ee348a-bd55-4193-8f3a-f0d63a695db1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.868156] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 909.868156] env[62204]: value = "task-1200038" [ 909.868156] env[62204]: _type = "Task" [ 909.868156] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.868735] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "refresh_cache-7a0e579d-38e7-4f04-bf4d-1076dfc3b374" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.869101] env[62204]: DEBUG nova.compute.manager [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Instance network_info: |[{"id": "4629653e-3fb2-4a37-b37d-0435f854c4bf", "address": "fa:16:3e:26:4a:95", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4629653e-3f", "ovs_interfaceid": "4629653e-3fb2-4a37-b37d-0435f854c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 909.872636] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:4a:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4629653e-3fb2-4a37-b37d-0435f854c4bf', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 909.881085] env[62204]: DEBUG oslo.service.loopingcall [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 909.881085] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 909.881797] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5e13a94-de8e-4a05-89ad-d70c5b3cda35 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.902577] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200038, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.908481] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 909.908481] env[62204]: value = "task-1200039" [ 909.908481] env[62204]: _type = "Task" [ 909.908481] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.917435] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200039, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.930850] env[62204]: DEBUG oslo_vmware.api [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200031, 'name': PowerOffVM_Task, 'duration_secs': 0.323424} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.931130] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.931383] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 909.931558] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8ba12cd-4d5e-4098-b89c-a9010b09c348 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.995823] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200032, 'name': Rename_Task, 'duration_secs': 0.271859} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.995823] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.995823] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1efc2fa4-89fd-42fc-8436-9e1342078f6f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.004157] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 910.004157] env[62204]: value = "task-1200041" [ 910.004157] env[62204]: _type = "Task" [ 910.004157] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.009485] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.009768] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.010516] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Deleting the datastore file [datastore1] 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.010684] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c3c0346-f03c-4027-81be-6647668a2738 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.015953] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.019998] env[62204]: DEBUG oslo_vmware.api [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 910.019998] env[62204]: value = "task-1200042" [ 910.019998] env[62204]: _type = "Task" [ 910.019998] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.029898] env[62204]: DEBUG oslo_vmware.api [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200042, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.070298] env[62204]: DEBUG oslo_concurrency.lockutils [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] Releasing lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.070298] env[62204]: DEBUG nova.compute.manager [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Received event network-changed-7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.070536] env[62204]: DEBUG nova.compute.manager [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Refreshing instance network info cache due to event network-changed-7394819f-3d04-4685-a087-5a61976b658a. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 910.070597] env[62204]: DEBUG oslo_concurrency.lockutils [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] Acquiring lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.071482] env[62204]: DEBUG oslo_concurrency.lockutils [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] Acquired lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.071482] env[62204]: DEBUG nova.network.neutron [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Refreshing network info cache for port 7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 910.124252] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e54d44-797e-6c63-378d-887057dd9051, 'name': SearchDatastore_Task, 'duration_secs': 0.009848} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.125191] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a3e3ded-39b1-4a48-a26c-520ab758f99c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.131291] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 910.131291] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b9e7eb-c3c1-5b2e-4e12-8207dd71d623" [ 910.131291] env[62204]: _type = "Task" [ 910.131291] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.140715] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b9e7eb-c3c1-5b2e-4e12-8207dd71d623, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.271667] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fc733c55-ad21-435a-bd92-f167dff13c39 tempest-SecurityGroupsTestJSON-634231456 tempest-SecurityGroupsTestJSON-634231456-project-member] Lock "51c9e353-f2cf-41b4-b37e-1cfd5dca0518" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.744s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.323329] env[62204]: DEBUG oslo_concurrency.lockutils [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] Releasing lock "refresh_cache-2727dc46-98ed-435d-89ef-41bc20cda776" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.323653] env[62204]: DEBUG nova.compute.manager [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Received event network-vif-plugged-4629653e-3fb2-4a37-b37d-0435f854c4bf {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.323906] env[62204]: DEBUG oslo_concurrency.lockutils [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] Acquiring lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.324228] env[62204]: DEBUG oslo_concurrency.lockutils [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] Lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.324386] env[62204]: DEBUG oslo_concurrency.lockutils [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] Lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 910.324569] env[62204]: DEBUG nova.compute.manager [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] No waiting events found dispatching network-vif-plugged-4629653e-3fb2-4a37-b37d-0435f854c4bf {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 910.324785] env[62204]: WARNING nova.compute.manager [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Received unexpected event network-vif-plugged-4629653e-3fb2-4a37-b37d-0435f854c4bf for instance with vm_state building and task_state spawning. [ 910.324908] env[62204]: DEBUG nova.compute.manager [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Received event network-changed-4629653e-3fb2-4a37-b37d-0435f854c4bf {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 910.325083] env[62204]: DEBUG nova.compute.manager [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Refreshing instance network info cache due to event network-changed-4629653e-3fb2-4a37-b37d-0435f854c4bf. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 910.325281] env[62204]: DEBUG oslo_concurrency.lockutils [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] Acquiring lock "refresh_cache-7a0e579d-38e7-4f04-bf4d-1076dfc3b374" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.325428] env[62204]: DEBUG oslo_concurrency.lockutils [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] Acquired lock "refresh_cache-7a0e579d-38e7-4f04-bf4d-1076dfc3b374" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.325594] env[62204]: DEBUG nova.network.neutron [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Refreshing network info cache for port 4629653e-3fb2-4a37-b37d-0435f854c4bf {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 910.378512] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200038, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.422608] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200039, 'name': CreateVM_Task, 'duration_secs': 0.433982} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.427049] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 910.428412] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.428412] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.428852] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 910.431698] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a105aba-980c-49b3-b07f-78797b11b9d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.436506] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 910.436506] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d77c95-4102-07c2-8a68-3f0981569ce2" [ 910.436506] env[62204]: _type = "Task" [ 910.436506] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.447940] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d77c95-4102-07c2-8a68-3f0981569ce2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.518681] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200041, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.531773] env[62204]: DEBUG oslo_vmware.api [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200042, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.617506] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471392ab-16f2-4509-9d75-efd0de2872b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.629317] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a426ee28-7614-404c-8d76-dcc1a50db2b9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.644710] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b9e7eb-c3c1-5b2e-4e12-8207dd71d623, 'name': SearchDatastore_Task, 'duration_secs': 0.057934} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.670803] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.671146] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. {{(pid=62204) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 910.671687] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e33f658-cfd9-420f-a5be-4d1558d57727 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.674472] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f149e6cc-21ef-4dd6-9fce-0e7a7f06c5f0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.685375] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa037ee-1e86-49f5-ac40-e21c2c8f8424 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.689771] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 910.689771] env[62204]: value = "task-1200043" [ 910.689771] env[62204]: _type = "Task" [ 910.689771] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.701474] env[62204]: DEBUG nova.compute.provider_tree [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.709416] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200043, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.882805] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200038, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.950303] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d77c95-4102-07c2-8a68-3f0981569ce2, 'name': SearchDatastore_Task, 'duration_secs': 0.091449} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.950653] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.950902] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.951166] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.951416] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.951535] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.951763] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7ff8d6a-59a9-4c85-88c8-47dd375f9627 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.970215] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.970215] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 910.971036] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29cd4051-bf1c-4fd1-9b29-f8d85500dcc4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.984068] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 910.984068] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521c489e-b16c-9a77-27b0-167b38fe0f55" [ 910.984068] env[62204]: _type = "Task" [ 910.984068] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.996777] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521c489e-b16c-9a77-27b0-167b38fe0f55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.022023] env[62204]: DEBUG oslo_vmware.api [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200041, 'name': PowerOnVM_Task, 'duration_secs': 0.889192} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.023343] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 911.023747] env[62204]: INFO nova.compute.manager [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Took 10.24 seconds to spawn the instance on the hypervisor. 
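The recurring "Waiting for the task: (returnval){ value = "task-..." }", "progress is N%" and "completed successfully" records above (task-1200041 went from 0% to 89% to done before the instance finished spawning) are oslo.vmware's wait_for_task polling a vCenter Task managed object until it leaves the running state; the real loop is driven by a fixed-interval looping call. A self-contained sketch of that polling pattern against a fake task object, not the real oslo.vmware API:

    # Poll-until-done pattern mirroring the wait_for_task / _poll_task records above.
    import itertools
    import time

    class FakeTask:
        """Stands in for a vCenter Task reference; succeeds after a few polls."""
        def __init__(self, name):
            self.value = name
            self._progress = itertools.chain([0, 33, 89], itertools.repeat(100))

        def poll(self):
            progress = next(self._progress)
            state = "success" if progress == 100 else "running"
            return state, progress

    def wait_for_task(task, interval=0.1):
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            if state == "running":
                print(f"Task {task.value} progress is {progress}%.")
                time.sleep(interval)
            elif state == "success":
                print(f"Task {task.value} completed successfully "
                      f"(duration_secs={time.monotonic() - start:.3f}).")
                return
            else:
                raise RuntimeError(f"Task {task.value} failed in state {state}")

    if __name__ == "__main__":
        wait_for_task(FakeTask("task-1200041"))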
[ 911.024075] env[62204]: DEBUG nova.compute.manager [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 911.025643] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5431c8-3009-4d5c-9279-6505d1895ac3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.044779] env[62204]: DEBUG oslo_vmware.api [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200042, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.090512] env[62204]: DEBUG nova.network.neutron [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updated VIF entry in instance network info cache for port 7394819f-3d04-4685-a087-5a61976b658a. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 911.091036] env[62204]: DEBUG nova.network.neutron [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updating instance_info_cache with network_info: [{"id": "7394819f-3d04-4685-a087-5a61976b658a", "address": "fa:16:3e:96:f4:0f", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7394819f-3d", "ovs_interfaceid": "7394819f-3d04-4685-a087-5a61976b658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.129729] env[62204]: DEBUG nova.network.neutron [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Updated VIF entry in instance network info cache for port 4629653e-3fb2-4a37-b37d-0435f854c4bf. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 911.130444] env[62204]: DEBUG nova.network.neutron [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Updating instance_info_cache with network_info: [{"id": "4629653e-3fb2-4a37-b37d-0435f854c4bf", "address": "fa:16:3e:26:4a:95", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4629653e-3f", "ovs_interfaceid": "4629653e-3fb2-4a37-b37d-0435f854c4bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.201029] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200043, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.205437] env[62204]: DEBUG nova.scheduler.client.report [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.380039] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200038, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.495471] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521c489e-b16c-9a77-27b0-167b38fe0f55, 'name': SearchDatastore_Task, 'duration_secs': 0.08467} completed successfully. 
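The "Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 ..." record above carries the resource-provider inventory this compute node reports to Placement. Placement treats (total - reserved) * allocation_ratio as the schedulable capacity of each resource class, so the figures in that record work out as in the small worked example below (illustrative arithmetic, not Nova code):

    # Capacity implied by the inventory dict logged above.
    INVENTORY = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(inventory):
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inventory.items()}

    if __name__ == "__main__":
        # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
        print(capacity(INVENTORY))

The min_unit, max_unit and step_size fields from the same record constrain individual allocation sizes rather than the total.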
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.496455] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4849d00c-c81a-4976-9a72-146ee79b46e6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.502868] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 911.502868] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522036ed-de35-b145-0d51-e8bb04994b62" [ 911.502868] env[62204]: _type = "Task" [ 911.502868] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.513208] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522036ed-de35-b145-0d51-e8bb04994b62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.534122] env[62204]: DEBUG oslo_vmware.api [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200042, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.556998] env[62204]: INFO nova.compute.manager [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Took 51.79 seconds to build instance. [ 911.596638] env[62204]: DEBUG oslo_concurrency.lockutils [req-4be3e6a8-cd4c-4059-af97-5da2804529fa req-986261fb-e90e-4fec-950a-48eb09838dd1 service nova] Releasing lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.633380] env[62204]: DEBUG oslo_concurrency.lockutils [req-00357d87-d506-4e23-8967-4089a8905d0d req-33346ce1-4c5c-489f-9773-e6747cc8fe71 service nova] Releasing lock "refresh_cache-7a0e579d-38e7-4f04-bf4d-1076dfc3b374" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.701669] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200043, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.711372] env[62204]: DEBUG oslo_concurrency.lockutils [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.991s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.714363] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 22.342s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.742592] env[62204]: INFO nova.scheduler.client.report [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Deleted allocations for instance 211ca0c1-cf05-4148-ad5c-46cbbd72278e [ 911.882316] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200038, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.016594] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522036ed-de35-b145-0d51-e8bb04994b62, 'name': SearchDatastore_Task, 'duration_secs': 0.087033} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.017050] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.017947] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 7a0e579d-38e7-4f04-bf4d-1076dfc3b374/7a0e579d-38e7-4f04-bf4d-1076dfc3b374.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 912.019492] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8e94b17-574f-41b6-8e49-5e68aeb8f0e2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.032895] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 912.032895] env[62204]: value = "task-1200045" [ 912.032895] env[62204]: _type = "Task" [ 912.032895] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.045777] env[62204]: DEBUG oslo_vmware.api [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.572034} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.047336] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 912.047661] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 912.047928] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 912.048208] env[62204]: INFO nova.compute.manager [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Took 2.66 seconds to destroy the instance on the hypervisor. 
[ 912.048608] env[62204]: DEBUG oslo.service.loopingcall [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.053170] env[62204]: DEBUG nova.compute.manager [-] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 912.053273] env[62204]: DEBUG nova.network.neutron [-] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 912.055658] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.059774] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3d7a6ce-f789-4092-ab4a-1c86c500be01 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 53.302s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.203509] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200043, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.220428] env[62204]: INFO nova.compute.claims [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 912.251503] env[62204]: DEBUG oslo_concurrency.lockutils [None req-491ce097-0c94-4133-9692-0aff07aad0f0 tempest-ServerGroupTestJSON-279080031 tempest-ServerGroupTestJSON-279080031-project-member] Lock "211ca0c1-cf05-4148-ad5c-46cbbd72278e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 27.984s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.381785] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200038, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.544731] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200045, 'name': CopyVirtualDisk_Task} progress is 0%.
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.704410] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200043, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.968687} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.704751] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. [ 912.705627] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea29de00-a25d-4c12-ae0b-0fc14a49ae6a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.728155] env[62204]: INFO nova.compute.resource_tracker [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating resource usage from migration 1ece4807-e6af-4d6e-b887-100f22af2351 [ 912.738611] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 912.740630] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7b29ede-5c85-41ea-b5f9-75fe9eec8a9c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.769091] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 912.769091] env[62204]: value = "task-1200046" [ 912.769091] env[62204]: _type = "Task" [ 912.769091] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.784082] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200046, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.889826] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200038, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.680317} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.894018] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137/OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137.vmdk to [datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6/c2930f97-8cc3-4e0b-b082-ac3975f12ee6.vmdk. [ 912.894018] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Cleaning up location [datastore1] OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137 {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 912.894018] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c72c4322-2e72-4eab-b91d-77f0b7fca137 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 912.894018] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a562c1d-0196-49b0-8b0e-9d05878f8565 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.899418] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 912.899418] env[62204]: value = "task-1200047" [ 912.899418] env[62204]: _type = "Task" [ 912.899418] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.913924] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.046459] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200045, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.967008} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.050266] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 7a0e579d-38e7-4f04-bf4d-1076dfc3b374/7a0e579d-38e7-4f04-bf4d-1076dfc3b374.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 913.050671] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.052776] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66207e16-c8dc-4b99-a3d9-b58f51cf53dc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.059543] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 913.059543] env[62204]: value = "task-1200048" [ 913.059543] env[62204]: _type = "Task" [ 913.059543] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.074872] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200048, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.187319] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286731a3-4f80-4e0a-a8b9-a4aee3e49c4c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.198027] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88771161-4308-45e9-ab0e-443000a72597 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.231648] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733f2c5f-bb8b-4ed7-8ee6-950fc406b124 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.240360] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6198596-b1f4-4ee3-9642-6b23e7f82158 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.254842] env[62204]: DEBUG nova.compute.provider_tree [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.278531] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200046, 'name': ReconfigVM_Task, 'duration_secs': 0.393284} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.278846] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.279757] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e344047-fab2-4cff-b05c-134eb5ab4795 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.300172] env[62204]: DEBUG nova.network.neutron [-] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.309514] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80e6afc1-37cf-4a1e-90f1-ebce1aefec02 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.324093] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 913.324093] env[62204]: value = "task-1200049" [ 913.324093] env[62204]: _type = "Task" [ 913.324093] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.332876] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200049, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.415788] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102755} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.416215] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.416485] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6/c2930f97-8cc3-4e0b-b082-ac3975f12ee6.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.416875] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6/c2930f97-8cc3-4e0b-b082-ac3975f12ee6.vmdk to [datastore1] 2727dc46-98ed-435d-89ef-41bc20cda776/2727dc46-98ed-435d-89ef-41bc20cda776.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 913.417271] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-181942a1-19bd-4a0e-a150-630aba72bcd0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.426509] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 913.426509] env[62204]: value = "task-1200050" [ 913.426509] env[62204]: _type = "Task" [ 913.426509] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.438425] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200050, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.458808] env[62204]: DEBUG nova.compute.manager [req-1bb6c222-03e6-4d08-be3c-0cfca7aa7fed req-3469756e-530e-4a7e-b17b-17febd21a8fc service nova] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Received event network-vif-deleted-ac345dde-4672-4c9d-a224-24ebc7900628 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 913.578303] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077033} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.583162] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.583162] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26468bab-2cb8-4174-b20e-55795378ca5b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.614273] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 7a0e579d-38e7-4f04-bf4d-1076dfc3b374/7a0e579d-38e7-4f04-bf4d-1076dfc3b374.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.615103] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e58d0f7-538f-4884-8e40-dd0806349022 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.637270] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 913.637270] env[62204]: value = "task-1200051" [ 913.637270] env[62204]: _type = "Task" [ 913.637270] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.645958] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200051, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.757994] env[62204]: DEBUG nova.scheduler.client.report [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 913.809264] env[62204]: INFO nova.compute.manager [-] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Took 1.75 seconds to deallocate network for instance. [ 913.839328] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200049, 'name': ReconfigVM_Task, 'duration_secs': 0.164468} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.839328] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 913.839328] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e67fe05b-3f76-4c06-aaab-77d55657ffb3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.845131] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 913.845131] env[62204]: value = "task-1200052" [ 913.845131] env[62204]: _type = "Task" [ 913.845131] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.857751] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.939977] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200050, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.960393] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Volume attach. 
Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 913.960665] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260098', 'volume_id': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'name': 'volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4dc4546f-85e6-4259-9ccd-a7396669eace', 'attached_at': '', 'detached_at': '', 'volume_id': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'serial': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 913.961629] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a6a60ef-b4a2-4149-8551-8a1675d811d6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.983408] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb6b9fd-693e-470e-9836-3812d0567e26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.018207] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047/volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 914.018718] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b31a827-b930-4056-87fb-a2e6be8e8450 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.040377] env[62204]: DEBUG oslo_vmware.api [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 914.040377] env[62204]: value = "task-1200053" [ 914.040377] env[62204]: _type = "Task" [ 914.040377] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.050997] env[62204]: DEBUG oslo_vmware.api [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200053, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.150295] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200051, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.263703] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.549s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.263946] env[62204]: INFO nova.compute.manager [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Migrating [ 914.271189] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.839s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.271503] env[62204]: DEBUG nova.objects.instance [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lazy-loading 'resources' on Instance uuid 62605b48-e640-4b4d-ab77-1ed44a75daa3 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.325489] env[62204]: DEBUG oslo_concurrency.lockutils [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.357274] env[62204]: DEBUG nova.compute.manager [req-f51633a5-752b-49c0-acbd-98c388e31970 req-e666a5aa-9031-43bb-a466-1810f53b69f1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 914.357520] env[62204]: DEBUG nova.compute.manager [req-f51633a5-752b-49c0-acbd-98c388e31970 req-e666a5aa-9031-43bb-a466-1810f53b69f1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing instance network info cache due to event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 914.358474] env[62204]: DEBUG oslo_concurrency.lockutils [req-f51633a5-752b-49c0-acbd-98c388e31970 req-e666a5aa-9031-43bb-a466-1810f53b69f1 service nova] Acquiring lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.358474] env[62204]: DEBUG oslo_concurrency.lockutils [req-f51633a5-752b-49c0-acbd-98c388e31970 req-e666a5aa-9031-43bb-a466-1810f53b69f1 service nova] Acquired lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.360095] env[62204]: DEBUG nova.network.neutron [req-f51633a5-752b-49c0-acbd-98c388e31970 req-e666a5aa-9031-43bb-a466-1810f53b69f1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 914.371460] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200052, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.439777] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200050, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.550045] env[62204]: DEBUG oslo_vmware.api [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200053, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.651075] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200051, 'name': ReconfigVM_Task, 'duration_secs': 0.725622} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.651075] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 7a0e579d-38e7-4f04-bf4d-1076dfc3b374/7a0e579d-38e7-4f04-bf4d-1076dfc3b374.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.651075] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdb0c89f-14b8-4b95-bd4a-9e3754f58abe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.660021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.660021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.660021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 914.660021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.660021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.661833] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 914.661833] env[62204]: value = "task-1200054" [ 914.661833] env[62204]: _type = "Task" [ 914.661833] env[62204]: } to complete.
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.666703] env[62204]: INFO nova.compute.manager [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Terminating instance [ 914.669132] env[62204]: DEBUG nova.compute.manager [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 914.669351] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.670276] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbfcbd3-ac30-4bd2-880b-4b03f9451a84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.680759] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200054, 'name': Rename_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.688022] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.688022] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-203f39cb-c4c4-4a9b-889e-385c506e5a94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.692540] env[62204]: DEBUG oslo_vmware.api [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 914.692540] env[62204]: value = "task-1200055" [ 914.692540] env[62204]: _type = "Task" [ 914.692540] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.709429] env[62204]: DEBUG oslo_vmware.api [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200055, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.786980] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 914.786980] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.786980] env[62204]: DEBUG nova.network.neutron [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 914.861198] env[62204]: DEBUG oslo_vmware.api [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200052, 'name': PowerOnVM_Task, 'duration_secs': 0.641165} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.866524] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.871760] env[62204]: DEBUG nova.compute.manager [None req-57e32ca1-094f-4f7b-b0bc-2353266c70be tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 914.872972] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50170099-1429-4100-9c09-a2fa835aa656 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.942350] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200050, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.063178] env[62204]: DEBUG oslo_vmware.api [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200053, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.175860] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200054, 'name': Rename_Task, 'duration_secs': 0.27036} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.176351] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.176646] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-325d4a51-92e1-4599-b777-914e64a7ec8b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.183825] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 915.183825] env[62204]: value = "task-1200056" [ 915.183825] env[62204]: _type = "Task" [ 915.183825] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.198617] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200056, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.199485] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709fff4e-569c-4080-8a9d-c1b94ace181b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.217583] env[62204]: DEBUG nova.network.neutron [req-f51633a5-752b-49c0-acbd-98c388e31970 req-e666a5aa-9031-43bb-a466-1810f53b69f1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updated VIF entry in instance network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 915.217583] env[62204]: DEBUG nova.network.neutron [req-f51633a5-752b-49c0-acbd-98c388e31970 req-e666a5aa-9031-43bb-a466-1810f53b69f1 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.223319] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db945da2-1f58-4aa2-962e-ea48d3e4565a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.227369] env[62204]: DEBUG oslo_vmware.api [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200055, 'name': PowerOffVM_Task, 'duration_secs': 0.291844} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.228060] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.228325] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.229161] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c265302e-edfe-41cd-8e67-c019a06fca67 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.267395] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d706ffc9-45c3-47bc-972c-230e5c5d6e06 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.277401] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1785de4-b7d2-4265-a671-ef49438bc7b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.299859] env[62204]: DEBUG nova.compute.provider_tree [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.340075] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.340412] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.340618] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Deleting the datastore file [datastore1] 431e7b20-22d8-4742-9c47-cdf9ee08fb32 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.340946] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f2cf31f-b83a-4715-83fc-09138e53db39 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.348076] env[62204]: DEBUG oslo_vmware.api [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 
tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 915.348076] env[62204]: value = "task-1200058" [ 915.348076] env[62204]: _type = "Task" [ 915.348076] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.357627] env[62204]: DEBUG oslo_vmware.api [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.414373] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "7671c77f-3da8-4a41-a472-138c7bd23a92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.414779] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "7671c77f-3da8-4a41-a472-138c7bd23a92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.440815] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200050, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.558027] env[62204]: DEBUG oslo_vmware.api [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200053, 'name': ReconfigVM_Task, 'duration_secs': 1.318329} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.558027] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Reconfigured VM instance instance-0000004d to attach disk [datastore2] volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047/volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 915.564146] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a13743fd-ec47-4fc6-8dd3-2f477713bb9e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.592516] env[62204]: DEBUG oslo_vmware.api [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 915.592516] env[62204]: value = "task-1200059" [ 915.592516] env[62204]: _type = "Task" [ 915.592516] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.605383] env[62204]: DEBUG oslo_vmware.api [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200059, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.642149] env[62204]: DEBUG nova.network.neutron [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance_info_cache with network_info: [{"id": "830a7992-393b-4d36-82d8-b660d6904ae7", "address": "fa:16:3e:01:7a:45", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap830a7992-39", "ovs_interfaceid": "830a7992-393b-4d36-82d8-b660d6904ae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.695019] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200056, 'name': PowerOnVM_Task} progress 
is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.728897] env[62204]: DEBUG oslo_concurrency.lockutils [req-f51633a5-752b-49c0-acbd-98c388e31970 req-e666a5aa-9031-43bb-a466-1810f53b69f1 service nova] Releasing lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 915.803196] env[62204]: DEBUG nova.scheduler.client.report [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 915.860300] env[62204]: DEBUG oslo_vmware.api [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.917509] env[62204]: DEBUG nova.compute.manager [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 915.949590] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200050, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.103371] env[62204]: DEBUG oslo_vmware.api [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200059, 'name': ReconfigVM_Task, 'duration_secs': 0.472265} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.103687] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260098', 'volume_id': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'name': 'volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4dc4546f-85e6-4259-9ccd-a7396669eace', 'attached_at': '', 'detached_at': '', 'volume_id': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'serial': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 916.145077] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 916.195581] env[62204]: DEBUG oslo_vmware.api [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200056, 'name': PowerOnVM_Task, 'duration_secs': 0.800715} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.195815] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 916.196048] env[62204]: INFO nova.compute.manager [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Took 9.42 seconds to spawn the instance on the hypervisor. 
[ 916.196295] env[62204]: DEBUG nova.compute.manager [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 916.197378] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3322d43-9b14-4b37-8fca-9dc93b1c726f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.315025] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.041s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.315025] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 20.059s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.351019] env[62204]: INFO nova.scheduler.client.report [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted allocations for instance 62605b48-e640-4b4d-ab77-1ed44a75daa3 [ 916.366475] env[62204]: DEBUG oslo_vmware.api [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.781518} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.366921] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.367245] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.367679] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.367981] env[62204]: INFO nova.compute.manager [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Took 1.70 seconds to destroy the instance on the hypervisor. 
[ 916.368347] env[62204]: DEBUG oslo.service.loopingcall [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.368678] env[62204]: DEBUG nova.compute.manager [-] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 916.368902] env[62204]: DEBUG nova.network.neutron [-] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 916.443250] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.448170] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200050, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.623088} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.448608] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c2930f97-8cc3-4e0b-b082-ac3975f12ee6/c2930f97-8cc3-4e0b-b082-ac3975f12ee6.vmdk to [datastore1] 2727dc46-98ed-435d-89ef-41bc20cda776/2727dc46-98ed-435d-89ef-41bc20cda776.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 916.449583] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586013e2-5db7-4863-9d0d-1d21b2128110 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.475871] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 2727dc46-98ed-435d-89ef-41bc20cda776/2727dc46-98ed-435d-89ef-41bc20cda776.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 916.476859] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7092c2bf-57b9-47ef-8e54-0e7da9a43d73 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.504426] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ 
[ 916.504426] env[62204]: value = "task-1200060" [ 916.504426] env[62204]: _type = "Task" [ 916.504426] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.517611] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200060, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.717588] env[62204]: INFO nova.compute.manager [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Took 54.95 seconds to build instance. [ 916.864683] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4a226cc5-f27e-4022-b0df-a27cdf42ffcb tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "62605b48-e640-4b4d-ab77-1ed44a75daa3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.396s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.999868] env[62204]: INFO nova.compute.manager [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Unrescuing [ 917.000373] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.000670] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquired lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.001208] env[62204]: DEBUG nova.network.neutron [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 917.020227] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200060, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.090118] env[62204]: DEBUG nova.compute.manager [req-475ed3df-b598-40f5-a4ee-f055f503b409 req-7862c4f8-2b9a-4a9b-b93f-5d862b2127cf service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Received event network-vif-deleted-71d3f404-a411-4f5e-93a0-ceb7817ec80b {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.090118] env[62204]: INFO nova.compute.manager [req-475ed3df-b598-40f5-a4ee-f055f503b409 req-7862c4f8-2b9a-4a9b-b93f-5d862b2127cf service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Neutron deleted interface 71d3f404-a411-4f5e-93a0-ceb7817ec80b; detaching it from the instance and deleting it from the info cache [ 917.090402] env[62204]: DEBUG nova.network.neutron [req-475ed3df-b598-40f5-a4ee-f055f503b409 req-7862c4f8-2b9a-4a9b-b93f-5d862b2127cf service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.146592] env[62204]: DEBUG nova.network.neutron [-] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.159912] env[62204]: DEBUG nova.objects.instance [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'flavor' on Instance uuid 4dc4546f-85e6-4259-9ccd-a7396669eace {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.220188] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4d477287-63e9-43f3-b22d-9fd613f1c21c tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.689s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.332657] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Applying migration context for instance 0a4a432d-a71a-4da7-be90-25dcec5a64c6 as it has an incoming, in-progress migration ac075a5b-5a18-471b-b2b2-598795397010. Migration status is confirming {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 917.332894] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Applying migration context for instance 7b7032a8-8093-43fb-b2e2-c6308d96e819 as it has an incoming, in-progress migration 1ece4807-e6af-4d6e-b887-100f22af2351. 
Migration status is migrating {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 917.335153] env[62204]: INFO nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating resource usage from migration ac075a5b-5a18-471b-b2b2-598795397010 [ 917.335474] env[62204]: INFO nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating resource usage from migration 1ece4807-e6af-4d6e-b887-100f22af2351 [ 917.357476] env[62204]: WARNING nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 917.357644] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance c0990e53-70c9-4536-b26a-bc00bd457c56 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.357772] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.357891] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance f5f0c15f-ae0d-4615-93ab-3203a5d7e090 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.358063] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 6dc170a4-b08e-44b5-a152-832670e6866b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.358194] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 2b728904-19ef-4773-9260-c615da522801 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.358312] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 7412d7ef-b370-4253-8d57-d2bd5d06d6a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.358426] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 431e7b20-22d8-4742-9c47-cdf9ee08fb32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.358536] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 98805916-8501-4afb-9e1c-a5393f6e5557 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.358764] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 4dc4546f-85e6-4259-9ccd-a7396669eace actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.358764] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 2727dc46-98ed-435d-89ef-41bc20cda776 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.358897] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance bd0f87d1-e53a-4433-afc6-6aea7e68d6f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.358951] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance f445a8ea-ff21-44e9-8389-231a03c51650 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.359192] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Migration ac075a5b-5a18-471b-b2b2-598795397010 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 917.359330] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 0a4a432d-a71a-4da7-be90-25dcec5a64c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.359558] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.359728] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance d97d792d-614f-42e3-8516-6c0a7cf15ad5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.359851] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 274285e5-fc23-48b4-b0d6-5a67bc764d78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.359962] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 7a0e579d-38e7-4f04-bf4d-1076dfc3b374 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.360084] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Migration 1ece4807-e6af-4d6e-b887-100f22af2351 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 917.360194] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 7b7032a8-8093-43fb-b2e2-c6308d96e819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 917.518524] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200060, 'name': ReconfigVM_Task, 'duration_secs': 0.911623} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.518524] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 2727dc46-98ed-435d-89ef-41bc20cda776/2727dc46-98ed-435d-89ef-41bc20cda776.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.519106] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_options': None, 'boot_index': 0, 'guest_format': None, 'disk_bus': None, 'device_name': '/dev/sda', 'encrypted': False, 'encryption_format': None, 'device_type': 'disk', 'encryption_secret_uuid': None, 'size': 0, 'image_id': 'c0e4d3a1-f965-49e2-ab05-fbf425872dcc'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'mount_device': '/dev/sdb', 'boot_index': None, 'guest_format': None, 'attachment_id': 'ae549a33-54e4-46fb-b34a-cadd17915d2b', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260068', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'name': 'volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2727dc46-98ed-435d-89ef-41bc20cda776', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'serial': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272'}, 'disk_bus': None, 'device_type': None, 'volume_type': None}], 'swap': None} {{(pid=62204) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 917.519330] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Volume attach. 
Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 917.519532] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260068', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'name': 'volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2727dc46-98ed-435d-89ef-41bc20cda776', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'serial': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 917.521203] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b9157f-5a37-41a7-a79e-d61b0bad689d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.538057] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c0ddbb-a757-47f1-afdd-9af8af33216a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.564530] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272/volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.567073] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeaaefb6-d651-4576-a796-132e9b476f5a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.586902] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 917.586902] env[62204]: value = "task-1200061" [ 917.586902] env[62204]: _type = "Task" [ 917.586902] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.594562] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200061, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.594798] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96c561de-848f-470f-914d-f4200e6b9851 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.602975] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c8a564-f73f-41a2-a45b-c7a55651d921 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.639311] env[62204]: DEBUG nova.compute.manager [req-475ed3df-b598-40f5-a4ee-f055f503b409 req-7862c4f8-2b9a-4a9b-b93f-5d862b2127cf service nova] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Detach interface failed, port_id=71d3f404-a411-4f5e-93a0-ceb7817ec80b, reason: Instance 431e7b20-22d8-4742-9c47-cdf9ee08fb32 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 917.648686] env[62204]: INFO nova.compute.manager [-] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Took 1.28 seconds to deallocate network for instance. [ 917.661358] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe38a9a8-ecaf-4026-9e95-66350188f7b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.666883] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e51a71d9-efde-4dfb-a80b-57498b31907b tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.368s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.683993] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance '7b7032a8-8093-43fb-b2e2-c6308d96e819' progress to 0 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 917.814603] env[62204]: DEBUG nova.network.neutron [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Updating instance_info_cache with network_info: [{"id": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "address": "fa:16:3e:6d:c4:e1", "network": {"id": "9b2eaa21-fa75-417b-8af1-754e25729d68", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1173392259-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ec87b51786754b05aa75abb818bdbc15", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d7b5f1ef-d4b9-4ec3-b047-17e4cb349d25", "external-id": "nsx-vlan-transportzone-743", "segmentation_id": 743, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap07c8c254-5b", "ovs_interfaceid": "07c8c254-5b9d-40bf-820d-9474821ecfd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.866748] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 918.098938] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.160078] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.191344] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.191344] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de4b5f43-f1f4-414e-981a-9ceabdb770f7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.202718] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 918.202718] env[62204]: value = "task-1200062" [ 918.202718] env[62204]: _type = "Task" [ 918.202718] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.212175] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200062, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.317860] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Releasing lock "refresh_cache-259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.318659] env[62204]: DEBUG nova.objects.instance [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lazy-loading 'flavor' on Instance uuid 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.372468] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 7671c77f-3da8-4a41-a472-138c7bd23a92 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 918.374922] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Total usable vcpus: 48, total allocated vcpus: 20 {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 918.374922] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4480MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=20 pci_stats=[] {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 918.424546] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2dbe1cd0-b15e-479d-b344-5ad1dbfcee71 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "7a0bd008-f635-4d21-ba73-9640dbe37a48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.424891] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2dbe1cd0-b15e-479d-b344-5ad1dbfcee71 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "7a0bd008-f635-4d21-ba73-9640dbe37a48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.602575] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200061, 'name': ReconfigVM_Task, 'duration_secs': 0.683479} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.603182] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Reconfigured VM instance instance-00000034 to attach disk [datastore2] volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272/volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.613549] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24fd0b33-50f6-48ce-b373-38d582cdf3f7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.632255] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 918.632255] env[62204]: value = "task-1200063" [ 918.632255] env[62204]: _type = "Task" [ 918.632255] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.644784] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200063, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.717543] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200062, 'name': PowerOffVM_Task, 'duration_secs': 0.282898} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.717942] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.718383] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance '7b7032a8-8093-43fb-b2e2-c6308d96e819' progress to 17 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 918.722740] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.723319] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.723562] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.723826] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.724033] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.726414] env[62204]: INFO nova.compute.manager [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Terminating instance [ 918.734927] env[62204]: DEBUG nova.compute.manager [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 
tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 918.734927] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 918.734927] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c74aa49-e555-4743-8112-5fb7700eca11 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.743373] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.743373] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3aa58eb9-dda3-4299-9923-91d2d50eefae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.751220] env[62204]: DEBUG oslo_vmware.api [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 918.751220] env[62204]: value = "task-1200064" [ 918.751220] env[62204]: _type = "Task" [ 918.751220] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.760665] env[62204]: DEBUG oslo_vmware.api [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200064, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.826576] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a341853-fef2-4533-a7db-d4d54edf6004 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.830353] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164e3b41-b3f7-4795-9d5a-8518fb59a95c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.853076] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a997d9f-e3d3-41c1-bfdd-f25f38627682 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.858342] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.858679] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57c87038-863c-44dd-be8f-bd314cd6ebb6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.890167] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20b39a7-e3aa-45a8-8375-b767529db19c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.892950] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 918.892950] env[62204]: value = "task-1200065" [ 918.892950] env[62204]: _type = "Task" [ 918.892950] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.900851] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278a8db5-7c44-4863-a7e5-4269ae2668bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.907640] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200065, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.917823] env[62204]: DEBUG nova.compute.provider_tree [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.929543] env[62204]: DEBUG nova.compute.manager [None req-2dbe1cd0-b15e-479d-b344-5ad1dbfcee71 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 7a0bd008-f635-4d21-ba73-9640dbe37a48] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 919.146424] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200063, 'name': ReconfigVM_Task, 'duration_secs': 0.286113} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.146757] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260068', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'name': 'volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2727dc46-98ed-435d-89ef-41bc20cda776', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'serial': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 919.147362] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57ba3c32-76ef-45b4-84ef-d5021cb0e9a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.153934] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 919.153934] env[62204]: value = "task-1200066" [ 919.153934] env[62204]: _type = "Task" [ 919.153934] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.165151] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200066, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.229224] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 919.229359] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 919.229444] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 919.229812] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 919.229812] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 919.229955] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 919.230572] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 919.230572] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 919.230572] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 
tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 919.230817] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 919.230817] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 919.236488] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8edd2ed-83da-4177-ade7-6a317ae241d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.260050] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 919.260050] env[62204]: value = "task-1200067" [ 919.260050] env[62204]: _type = "Task" [ 919.260050] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.264579] env[62204]: DEBUG oslo_vmware.api [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200064, 'name': PowerOffVM_Task, 'duration_secs': 0.223941} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.265474] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.265839] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 919.266208] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a04e2e0-cf4d-4c29-a016-30cf0b1a59ba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.272688] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200067, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.403389] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200065, 'name': PowerOffVM_Task, 'duration_secs': 0.212589} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.404076] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 919.410662] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Reconfiguring VM instance instance-00000054 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 919.411229] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd0a81f7-37f3-4d03-87c8-ae9894eb58a0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.426058] env[62204]: DEBUG nova.scheduler.client.report [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 919.434182] env[62204]: DEBUG nova.compute.manager [None req-2dbe1cd0-b15e-479d-b344-5ad1dbfcee71 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 7a0bd008-f635-4d21-ba73-9640dbe37a48] Instance disappeared before build. {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 919.437011] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 919.437011] env[62204]: value = "task-1200069" [ 919.437011] env[62204]: _type = "Task" [ 919.437011] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.446128] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200069, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.519910] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 919.519910] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 919.519910] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleting the datastore file [datastore1] 7a0e579d-38e7-4f04-bf4d-1076dfc3b374 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.519910] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8fd8261-1d83-484a-af40-946aa3c9527d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.527132] env[62204]: DEBUG oslo_vmware.api [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 919.527132] env[62204]: value = "task-1200070" [ 919.527132] env[62204]: _type = "Task" [ 919.527132] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.536016] env[62204]: DEBUG oslo_vmware.api [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200070, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.663745] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200066, 'name': Rename_Task, 'duration_secs': 0.286913} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.664045] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.664365] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de8250b9-f86c-4259-bd24-0c0ba195d5c7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.670080] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 919.670080] env[62204]: value = "task-1200071" [ 919.670080] env[62204]: _type = "Task" [ 919.670080] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.677786] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200071, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.768395] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200067, 'name': ReconfigVM_Task, 'duration_secs': 0.236222} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.768739] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance '7b7032a8-8093-43fb-b2e2-c6308d96e819' progress to 33 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 919.938197] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 919.938483] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.624s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.941918] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 22.757s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.954356] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200069, 'name': ReconfigVM_Task, 'duration_secs': 0.213119} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.954663] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Reconfigured VM instance instance-00000054 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 919.954856] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.955217] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c43e4c7-82f7-4ca4-9690-8c00784f8a22 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.960974] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 919.960974] env[62204]: value = "task-1200072" [ 919.960974] env[62204]: _type = "Task" [ 919.960974] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.965913] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2dbe1cd0-b15e-479d-b344-5ad1dbfcee71 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "7a0bd008-f635-4d21-ba73-9640dbe37a48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.541s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.972323] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200072, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.041907] env[62204]: DEBUG oslo_vmware.api [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200070, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.309521} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.042304] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.042567] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 920.043052] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 920.043052] env[62204]: INFO nova.compute.manager [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Took 1.31 seconds to destroy the instance on the hypervisor. [ 920.043409] env[62204]: DEBUG oslo.service.loopingcall [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.043778] env[62204]: DEBUG nova.compute.manager [-] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 920.043778] env[62204]: DEBUG nova.network.neutron [-] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 920.182360] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200071, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.269346] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.269803] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.274549] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 920.274780] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 920.274940] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.275143] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 
tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 920.275297] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.275449] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 920.275683] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 920.275817] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 920.275986] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 920.276459] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 920.276613] env[62204]: DEBUG nova.virt.hardware [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 920.283587] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Reconfiguring VM instance instance-00000051 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 920.284997] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5027b161-cd57-4d2a-a7f0-f7ff205eb528 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.308812] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 920.308812] env[62204]: 
value = "task-1200073" [ 920.308812] env[62204]: _type = "Task" [ 920.308812] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.318727] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200073, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.472036] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200072, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.474302] env[62204]: DEBUG nova.compute.manager [req-f4c3c234-1539-4f3d-bb97-638792c6e37a req-74e15b69-502c-4b6f-b139-4522963b440d service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Received event network-vif-deleted-4629653e-3fb2-4a37-b37d-0435f854c4bf {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 920.474494] env[62204]: INFO nova.compute.manager [req-f4c3c234-1539-4f3d-bb97-638792c6e37a req-74e15b69-502c-4b6f-b139-4522963b440d service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Neutron deleted interface 4629653e-3fb2-4a37-b37d-0435f854c4bf; detaching it from the instance and deleting it from the info cache [ 920.474667] env[62204]: DEBUG nova.network.neutron [req-f4c3c234-1539-4f3d-bb97-638792c6e37a req-74e15b69-502c-4b6f-b139-4522963b440d service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.617698] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 920.617762] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 920.683904] env[62204]: DEBUG oslo_vmware.api [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200071, 'name': PowerOnVM_Task, 'duration_secs': 0.567113} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.684441] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.772212] env[62204]: DEBUG nova.compute.manager [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 920.819057] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200073, 'name': ReconfigVM_Task, 'duration_secs': 0.257039} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.822433] env[62204]: DEBUG nova.compute.manager [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 920.823620] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Reconfigured VM instance instance-00000051 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 920.824637] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e123e20-ab9f-4991-a9db-e8b1f4488e84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.827993] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24c2f75-1976-4497-9c5b-8381558a5f6a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.851823] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3738e04-a66a-49af-b5f6-1a706329414a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.866116] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 7b7032a8-8093-43fb-b2e2-c6308d96e819/7b7032a8-8093-43fb-b2e2-c6308d96e819.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.866116] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1b68862-952a-459f-81b7-70ff36dd8722 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.884685] env[62204]: DEBUG nova.network.neutron [-] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.889378] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d9fc55-6486-4c2a-980e-6a02cdfbbcf8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.894546] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 920.894546] env[62204]: value = "task-1200074" [ 920.894546] env[62204]: _type = "Task" [ 920.894546] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.929181] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd74ccf0-428d-4219-9bf4-35c4172c3479 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.935907] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200074, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.941443] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080a5f52-9c09-4a78-a6ec-bb6ff8d623a9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.956201] env[62204]: DEBUG nova.compute.provider_tree [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.970246] env[62204]: DEBUG oslo_vmware.api [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200072, 'name': PowerOnVM_Task, 'duration_secs': 0.853078} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.971133] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.971384] env[62204]: DEBUG nova.compute.manager [None req-6b4d3cb0-eebb-4dde-88b0-6ab53e068e18 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 920.972157] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c08b63f-fb78-4f7b-a8cc-edb7b9726ac1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.981466] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ffac2df-e7c5-4c83-8825-c1096a3c574a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.990758] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e749fc3-311b-41b0-ad4b-c1539e258ed2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.031388] env[62204]: DEBUG nova.compute.manager [req-f4c3c234-1539-4f3d-bb97-638792c6e37a req-74e15b69-502c-4b6f-b139-4522963b440d service nova] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Detach interface failed, port_id=4629653e-3fb2-4a37-b37d-0435f854c4bf, reason: Instance 7a0e579d-38e7-4f04-bf4d-1076dfc3b374 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 921.120139] env[62204]: DEBUG nova.compute.manager [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 921.299754] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.351680] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50cfef28-71ac-4639-8e70-2aa27bb469fc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 68.573s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 921.398023] env[62204]: INFO nova.compute.manager [-] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Took 1.35 seconds to deallocate network for instance. 
[ 921.408162] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200074, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.461951] env[62204]: DEBUG nova.scheduler.client.report [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 921.641238] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.906075] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 921.906414] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200074, 'name': ReconfigVM_Task, 'duration_secs': 0.520896} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.906678] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 7b7032a8-8093-43fb-b2e2-c6308d96e819/7b7032a8-8093-43fb-b2e2-c6308d96e819.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.906951] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance '7b7032a8-8093-43fb-b2e2-c6308d96e819' progress to 50 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 922.413343] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bc056c-68e0-4b5c-8499-63529cded759 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.432936] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9946e9-3515-4233-8356-dab1f0bdaafe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.450633] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance '7b7032a8-8093-43fb-b2e2-c6308d96e819' progress to 67 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 922.472321] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.531s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.472553] env[62204]: DEBUG nova.compute.manager [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62204) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4909}} [ 922.475856] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.744s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.477399] env[62204]: INFO nova.compute.claims [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 922.616554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.616554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.616554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.616554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.616554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.618805] env[62204]: INFO nova.compute.manager [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Terminating instance [ 922.620812] env[62204]: DEBUG nova.compute.manager [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 
259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 922.621014] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 922.621844] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43147a7-eea3-4960-88bf-7fe3d43b203d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.629742] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 922.629974] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-298652b4-3a72-4293-88f4-78657e2af847 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.636427] env[62204]: DEBUG oslo_vmware.api [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 922.636427] env[62204]: value = "task-1200075" [ 922.636427] env[62204]: _type = "Task" [ 922.636427] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.643765] env[62204]: DEBUG oslo_vmware.api [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200075, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.003760] env[62204]: DEBUG nova.network.neutron [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Port 830a7992-393b-4d36-82d8-b660d6904ae7 binding to destination host cpu-1 is already ACTIVE {{(pid=62204) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 923.044316] env[62204]: INFO nova.scheduler.client.report [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted allocation for migration ac075a5b-5a18-471b-b2b2-598795397010 [ 923.145965] env[62204]: DEBUG oslo_vmware.api [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200075, 'name': PowerOffVM_Task, 'duration_secs': 0.189274} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.146313] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 923.146494] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 923.146776] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9476d3d9-2ff0-40be-9458-09eeb15901a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.215674] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 923.215918] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 923.216124] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Deleting the datastore file [datastore1] 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 923.216469] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-824f0638-a7d7-4fbb-a232-83dd82854810 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.223725] env[62204]: DEBUG oslo_vmware.api [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 923.223725] env[62204]: value = "task-1200077" [ 923.223725] env[62204]: _type = "Task" [ 923.223725] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.231794] env[62204]: DEBUG oslo_vmware.api [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200077, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.517330] env[62204]: DEBUG nova.objects.instance [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'flavor' on Instance uuid 0a4a432d-a71a-4da7-be90-25dcec5a64c6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.553734] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5c53a873-cbb9-440b-a348-4e82ab468b28 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 29.932s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 923.734343] env[62204]: DEBUG oslo_vmware.api [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174225} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.737012] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 923.737240] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 923.737465] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.737639] env[62204]: INFO nova.compute.manager [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 923.737884] env[62204]: DEBUG oslo.service.loopingcall [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 923.739325] env[62204]: DEBUG nova.compute.manager [-] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 923.739325] env[62204]: DEBUG nova.network.neutron [-] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 923.812271] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8ae513-99ce-4e75-afc1-6d9305925552 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.823631] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b37f63-5186-4479-ab62-718832c99bb3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.858493] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29e4f43-b87e-4f74-be81-71273a274451 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.865763] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae63ff0-8796-4f20-8854-27912342205f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.878800] env[62204]: DEBUG nova.compute.provider_tree [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.031352] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "7b7032a8-8093-43fb-b2e2-c6308d96e819-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 924.031619] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.031767] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.032710] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 
tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.032870] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.033055] env[62204]: DEBUG nova.network.neutron [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 924.033239] env[62204]: DEBUG nova.objects.instance [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'info_cache' on Instance uuid 0a4a432d-a71a-4da7-be90-25dcec5a64c6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.035516] env[62204]: DEBUG nova.compute.manager [req-b07290b6-9092-4010-a2f2-1e1e80b984c4 req-5f8f0674-b4e5-4904-a355-70c9592d5d1d service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Received event network-vif-deleted-07c8c254-5b9d-40bf-820d-9474821ecfd6 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 924.035704] env[62204]: INFO nova.compute.manager [req-b07290b6-9092-4010-a2f2-1e1e80b984c4 req-5f8f0674-b4e5-4904-a355-70c9592d5d1d service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Neutron deleted interface 07c8c254-5b9d-40bf-820d-9474821ecfd6; detaching it from the instance and deleting it from the info cache [ 924.035871] env[62204]: DEBUG nova.network.neutron [req-b07290b6-9092-4010-a2f2-1e1e80b984c4 req-5f8f0674-b4e5-4904-a355-70c9592d5d1d service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.382975] env[62204]: DEBUG nova.scheduler.client.report [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 924.502518] env[62204]: DEBUG nova.network.neutron [-] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.538638] env[62204]: DEBUG nova.objects.base [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 
tempest-ServerActionsTestOtherB-1539555713-project-member] Object Instance<0a4a432d-a71a-4da7-be90-25dcec5a64c6> lazy-loaded attributes: flavor,info_cache {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 924.540176] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bee7d068-adcc-4c64-8d42-03cc9f6d0cf9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.550491] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca76ce2-e238-43ae-811f-e5c6659a8339 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.589018] env[62204]: DEBUG nova.compute.manager [req-b07290b6-9092-4010-a2f2-1e1e80b984c4 req-5f8f0674-b4e5-4904-a355-70c9592d5d1d service nova] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Detach interface failed, port_id=07c8c254-5b9d-40bf-820d-9474821ecfd6, reason: Instance 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 924.887570] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.888159] env[62204]: DEBUG nova.compute.manager [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 924.891112] env[62204]: DEBUG oslo_concurrency.lockutils [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.566s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.891380] env[62204]: DEBUG oslo_concurrency.lockutils [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.894019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.453s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.895528] env[62204]: INFO nova.compute.claims [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 924.919126] env[62204]: INFO nova.scheduler.client.report [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Deleted allocations for instance 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f [ 925.005252] env[62204]: INFO nova.compute.manager [-] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Took 1.27 seconds to deallocate network for instance. 
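The records above trace the VMware driver's instance-destroy path for 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1: PowerOffVM_Task, UnregisterVM, FileManager.DeleteDatastoreFile_Task, then Neutron deallocation. A minimal sketch of the same vCenter call pattern using oslo.vmware directly is shown below; it assumes an already-established session and already-resolved vm_ref, datacenter ref and datastore path, so it illustrates the calls visible in the opID lines rather than Nova's actual vm_util/ds_util implementation.

    # Hypothetical sketch of the destroy sequence seen in the trace above,
    # driving oslo.vmware directly. `session` is an established
    # oslo_vmware.api.VMwareAPISession; `vm_ref`, `dc_ref` and `ds_path`
    # (e.g. "[datastore1] <instance-uuid>") are assumed to be resolved already.

    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        # Power off the VM and block until vCenter reports the task done;
        # this is what produces the PowerOffVM_Task / wait_for_task lines.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # Remove the VM from the vCenter inventory; UnregisterVM is
        # synchronous, so there is no task to poll.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the instance directory from the datastore, matching the
        # FileManager.DeleteDatastoreFile_Task record in the trace.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)

Nova wraps each step in its own helpers (vm_util.power_off_instance, ds_util.file_delete, as the source paths in the log show), and only after the hypervisor-side cleanup does the compute manager hand off to Neutron to deallocate the ports.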
[ 925.067296] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.067581] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.067765] env[62204]: DEBUG nova.network.neutron [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 925.340497] env[62204]: DEBUG nova.network.neutron [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [{"id": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "address": "fa:16:3e:b6:54:e6", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccf86a68-c5", "ovs_interfaceid": "ccf86a68-c525-4b8b-940f-b0a08f2d3831", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.400360] env[62204]: DEBUG nova.compute.utils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 925.403712] env[62204]: DEBUG nova.compute.manager [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 925.403823] env[62204]: DEBUG nova.network.neutron [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 925.425830] env[62204]: DEBUG oslo_concurrency.lockutils [None req-711574b1-f441-44fa-b4dd-dd773cdd7861 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "4793e9fd-be87-4885-8f0e-1fcef6ce4d2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.046s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.441012] env[62204]: DEBUG nova.policy [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '370d4b8a24b84bf0a626d056c7758863', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb9a24ef26c74781a2ad36e3430ce630', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 925.511821] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.798495] env[62204]: DEBUG nova.network.neutron [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance_info_cache with network_info: [{"id": "830a7992-393b-4d36-82d8-b660d6904ae7", "address": "fa:16:3e:01:7a:45", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap830a7992-39", "ovs_interfaceid": "830a7992-393b-4d36-82d8-b660d6904ae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 925.843863] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-0a4a432d-a71a-4da7-be90-25dcec5a64c6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 925.893982] env[62204]: DEBUG nova.network.neutron [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Successfully created port: 5f278903-c85c-4f50-82ed-edfb3fb819c1 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.904739] env[62204]: DEBUG nova.compute.manager [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 926.236825] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0848c80f-7149-401c-9034-f431ac60efb5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.246258] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55cb9b7-aa2d-422e-8b22-245c14ff622b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.286512] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bcba69-9a01-4738-87d9-fbff71e9af8a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.295948] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96a24fd-2319-4c3a-9c12-cfae9dc6c0ab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.300718] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.313388] env[62204]: DEBUG nova.compute.provider_tree [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.347033] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 926.347725] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b51b605-93c7-4879-85e5-3979c8071d8c {{(pid=62204) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.355338] env[62204]: DEBUG oslo_vmware.api [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 926.355338] env[62204]: value = "task-1200078" [ 926.355338] env[62204]: _type = "Task" [ 926.355338] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.364456] env[62204]: DEBUG oslo_vmware.api [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200078, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.820633] env[62204]: DEBUG nova.scheduler.client.report [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.823982] env[62204]: DEBUG oslo_concurrency.lockutils [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "c0990e53-70c9-4536-b26a-bc00bd457c56" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.824266] env[62204]: DEBUG oslo_concurrency.lockutils [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.824490] env[62204]: INFO nova.compute.manager [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Shelving [ 926.850794] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3d26f4-e037-46df-85dd-fe92276d181c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.884112] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8abb307-b012-4f4a-b8ac-5589fa8f6de6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.886992] env[62204]: DEBUG oslo_vmware.api [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 
tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200078, 'name': PowerOnVM_Task, 'duration_secs': 0.38125} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.887304] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 926.887473] env[62204]: DEBUG nova.compute.manager [None req-a4324009-a344-4c94-b7c0-9d1ef3ad0e7f tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 926.888567] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebd6bef3-4089-4081-b370-adb0c39de67b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.893427] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance '7b7032a8-8093-43fb-b2e2-c6308d96e819' progress to 83 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.917234] env[62204]: DEBUG nova.compute.manager [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 926.951896] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d59f28a690d92223cba86f8e8c4102d4',container_format='bare',created_at=2024-10-08T23:42:41Z,direct_url=,disk_format='vmdk',id=a9e952fa-67fa-4a49-a75c-594f33aa3496,min_disk=1,min_ram=0,name='tempest-test-snap-266619502',owner='fb9a24ef26c74781a2ad36e3430ce630',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-08T23:42:55Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.952251] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.952424] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.952614] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 926.952764] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.952915] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.953155] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.953436] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 926.953549] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Got 1 possible topologies 
{{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.953640] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.953816] env[62204]: DEBUG nova.virt.hardware [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.954968] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd53e6a-bef8-4544-b349-5b2dead3e963 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.962860] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13233c86-83cf-48ff-a9e1-1abe8f680939 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.331128] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.331686] env[62204]: DEBUG nova.compute.manager [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 927.336342] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.177s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.336508] env[62204]: DEBUG nova.objects.instance [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lazy-loading 'resources' on Instance uuid 431e7b20-22d8-4742-9c47-cdf9ee08fb32 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.339369] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.339609] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a718bcb8-87f2-46b7-8561-8b9aaec4a607 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.348657] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 927.348657] env[62204]: value = "task-1200079" [ 927.348657] env[62204]: _type = "Task" [ 927.348657] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.357101] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200079, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.399901] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.400645] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd14bcda-4e38-492e-be79-2add7b50de07 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.411218] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 927.411218] env[62204]: value = "task-1200080" [ 927.411218] env[62204]: _type = "Task" [ 927.411218] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.419347] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200080, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.695172] env[62204]: DEBUG nova.compute.manager [req-4d75b2e8-0692-420b-be44-f14f663f1f9d req-8327551f-90f5-45a9-92ae-124d4b493809 service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Received event network-vif-plugged-5f278903-c85c-4f50-82ed-edfb3fb819c1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 927.695513] env[62204]: DEBUG oslo_concurrency.lockutils [req-4d75b2e8-0692-420b-be44-f14f663f1f9d req-8327551f-90f5-45a9-92ae-124d4b493809 service nova] Acquiring lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.695705] env[62204]: DEBUG oslo_concurrency.lockutils [req-4d75b2e8-0692-420b-be44-f14f663f1f9d req-8327551f-90f5-45a9-92ae-124d4b493809 service nova] Lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 927.695825] env[62204]: DEBUG oslo_concurrency.lockutils [req-4d75b2e8-0692-420b-be44-f14f663f1f9d req-8327551f-90f5-45a9-92ae-124d4b493809 service nova] Lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.696095] env[62204]: DEBUG nova.compute.manager [req-4d75b2e8-0692-420b-be44-f14f663f1f9d req-8327551f-90f5-45a9-92ae-124d4b493809 service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] No waiting events found dispatching network-vif-plugged-5f278903-c85c-4f50-82ed-edfb3fb819c1 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 927.696288] env[62204]: WARNING nova.compute.manager [req-4d75b2e8-0692-420b-be44-f14f663f1f9d req-8327551f-90f5-45a9-92ae-124d4b493809 service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Received unexpected event network-vif-plugged-5f278903-c85c-4f50-82ed-edfb3fb819c1 for instance with vm_state building and task_state spawning. [ 927.838976] env[62204]: DEBUG nova.compute.utils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 927.841327] env[62204]: DEBUG nova.compute.manager [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 927.841327] env[62204]: DEBUG nova.network.neutron [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 927.864675] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200079, 'name': PowerOffVM_Task, 'duration_secs': 0.256716} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.865000] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.866299] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f819a6a-d828-42b2-a00d-e600bc1a1ed8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.896811] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc640c26-520e-4882-98ea-dc244c095c2b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.907209] env[62204]: DEBUG nova.policy [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '478b22c814424575af79a8af808398a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81dc15a8604e4900845b79c75cc5ef16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 927.923767] env[62204]: DEBUG oslo_vmware.api [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200080, 'name': PowerOnVM_Task, 'duration_secs': 0.470493} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.924052] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 927.924244] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ee8480a8-146f-4d85-872a-5d5a7db0695a tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance '7b7032a8-8093-43fb-b2e2-c6308d96e819' progress to 100 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 928.223979] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085db77a-3e5b-42ce-ba08-707318c4869c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.232929] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a58352a-4708-4a19-9dbe-716bf1b79814 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.265057] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b2ca56-9fe6-4343-a14a-b0cd88fe1230 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.268716] env[62204]: DEBUG nova.network.neutron [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Successfully updated port: 5f278903-c85c-4f50-82ed-edfb3fb819c1 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 928.275779] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819796f2-8c7d-4fe1-b7f3-83dd24c692d1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.294917] env[62204]: DEBUG nova.compute.provider_tree [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.300603] env[62204]: DEBUG nova.network.neutron [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Successfully created port: 4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 928.312444] env[62204]: DEBUG nova.compute.manager [req-5c7934e3-35ef-4c1d-bdda-309b055c1d49 req-697aced3-0302-493f-9191-d3486d15534e service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Received event network-changed-5f278903-c85c-4f50-82ed-edfb3fb819c1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.312444] env[62204]: DEBUG nova.compute.manager 
[req-5c7934e3-35ef-4c1d-bdda-309b055c1d49 req-697aced3-0302-493f-9191-d3486d15534e service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Refreshing instance network info cache due to event network-changed-5f278903-c85c-4f50-82ed-edfb3fb819c1. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 928.312695] env[62204]: DEBUG oslo_concurrency.lockutils [req-5c7934e3-35ef-4c1d-bdda-309b055c1d49 req-697aced3-0302-493f-9191-d3486d15534e service nova] Acquiring lock "refresh_cache-4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.312791] env[62204]: DEBUG oslo_concurrency.lockutils [req-5c7934e3-35ef-4c1d-bdda-309b055c1d49 req-697aced3-0302-493f-9191-d3486d15534e service nova] Acquired lock "refresh_cache-4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.316619] env[62204]: DEBUG nova.network.neutron [req-5c7934e3-35ef-4c1d-bdda-309b055c1d49 req-697aced3-0302-493f-9191-d3486d15534e service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Refreshing network info cache for port 5f278903-c85c-4f50-82ed-edfb3fb819c1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 928.347843] env[62204]: DEBUG nova.compute.manager [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 928.410897] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 928.411204] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e91fc554-9186-4823-9ba1-ddd0c2377f07 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.419781] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 928.419781] env[62204]: value = "task-1200081" [ 928.419781] env[62204]: _type = "Task" [ 928.419781] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.429246] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200081, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.507405] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.507636] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.653046] env[62204]: DEBUG oslo_concurrency.lockutils [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.653350] env[62204]: DEBUG oslo_concurrency.lockutils [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.653568] env[62204]: DEBUG oslo_concurrency.lockutils [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.653752] env[62204]: DEBUG oslo_concurrency.lockutils [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.653928] env[62204]: DEBUG oslo_concurrency.lockutils [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.656799] env[62204]: INFO nova.compute.manager [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 
tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Terminating instance [ 928.658827] env[62204]: DEBUG nova.compute.manager [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 928.659042] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 928.659870] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931777a4-6d35-45d4-98c3-f141deb21501 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.667395] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 928.667675] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfed325e-fa05-41d0-a0a2-5ad84088e9d2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.673895] env[62204]: DEBUG oslo_vmware.api [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 928.673895] env[62204]: value = "task-1200082" [ 928.673895] env[62204]: _type = "Task" [ 928.673895] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.682521] env[62204]: DEBUG oslo_vmware.api [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200082, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.771987] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "refresh_cache-4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.798239] env[62204]: DEBUG nova.scheduler.client.report [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.851787] env[62204]: DEBUG nova.network.neutron [req-5c7934e3-35ef-4c1d-bdda-309b055c1d49 req-697aced3-0302-493f-9191-d3486d15534e service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.937031] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200081, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.987216] env[62204]: DEBUG nova.network.neutron [req-5c7934e3-35ef-4c1d-bdda-309b055c1d49 req-697aced3-0302-493f-9191-d3486d15534e service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.009927] env[62204]: DEBUG nova.compute.manager [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 929.185488] env[62204]: DEBUG oslo_vmware.api [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200082, 'name': PowerOffVM_Task, 'duration_secs': 0.194186} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.185663] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 929.185805] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 929.186078] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b24f527-5edc-4058-959b-4b207e1cece2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.255733] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 929.255951] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 929.256197] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleting the datastore file [datastore1] 0a4a432d-a71a-4da7-be90-25dcec5a64c6 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.256562] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-964c1dd2-1423-4c47-ad3d-bb51841bc92b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.264640] env[62204]: DEBUG oslo_vmware.api [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 929.264640] env[62204]: value = "task-1200084" [ 929.264640] env[62204]: _type = "Task" [ 929.264640] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.275846] env[62204]: DEBUG oslo_vmware.api [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200084, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.304071] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.968s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.307660] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.008s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.310345] env[62204]: INFO nova.compute.claims [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.335960] env[62204]: INFO nova.scheduler.client.report [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Deleted allocations for instance 431e7b20-22d8-4742-9c47-cdf9ee08fb32 [ 929.360700] env[62204]: DEBUG nova.compute.manager [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 929.390790] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 929.391182] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 929.391412] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.391664] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 929.392025] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.392025] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 929.392296] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 929.392485] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 929.392682] 
env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 929.393302] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 929.393302] env[62204]: DEBUG nova.virt.hardware [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 929.394521] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbdd59c-89f1-4db9-98b4-549939c88c54 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.404464] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f05b96-8f04-4aef-87d8-351637414706 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.429600] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200081, 'name': CreateSnapshot_Task, 'duration_secs': 0.979838} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.430242] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 929.430793] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57988832-0703-42ca-bdbb-d282d3542edf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.491839] env[62204]: DEBUG oslo_concurrency.lockutils [req-5c7934e3-35ef-4c1d-bdda-309b055c1d49 req-697aced3-0302-493f-9191-d3486d15534e service nova] Releasing lock "refresh_cache-4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.491839] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "refresh_cache-4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.491839] env[62204]: DEBUG nova.network.neutron [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 929.528203] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.739416] env[62204]: DEBUG nova.compute.manager [req-755fee4b-01ad-4269-83c3-22393787389d req-2c626f9d-0b2a-40dc-b583-1709ceaec034 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received event network-vif-plugged-4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.739789] env[62204]: DEBUG oslo_concurrency.lockutils [req-755fee4b-01ad-4269-83c3-22393787389d req-2c626f9d-0b2a-40dc-b583-1709ceaec034 service nova] Acquiring lock "7671c77f-3da8-4a41-a472-138c7bd23a92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.740149] env[62204]: DEBUG oslo_concurrency.lockutils [req-755fee4b-01ad-4269-83c3-22393787389d req-2c626f9d-0b2a-40dc-b583-1709ceaec034 service nova] Lock "7671c77f-3da8-4a41-a472-138c7bd23a92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.740445] env[62204]: DEBUG oslo_concurrency.lockutils [req-755fee4b-01ad-4269-83c3-22393787389d req-2c626f9d-0b2a-40dc-b583-1709ceaec034 service nova] Lock 
"7671c77f-3da8-4a41-a472-138c7bd23a92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.740734] env[62204]: DEBUG nova.compute.manager [req-755fee4b-01ad-4269-83c3-22393787389d req-2c626f9d-0b2a-40dc-b583-1709ceaec034 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] No waiting events found dispatching network-vif-plugged-4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 929.741030] env[62204]: WARNING nova.compute.manager [req-755fee4b-01ad-4269-83c3-22393787389d req-2c626f9d-0b2a-40dc-b583-1709ceaec034 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received unexpected event network-vif-plugged-4243893b-2fda-4a71-94f3-332643bceb52 for instance with vm_state building and task_state spawning. [ 929.774138] env[62204]: DEBUG oslo_vmware.api [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178643} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.774377] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.774567] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 929.774746] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 929.774916] env[62204]: INFO nova.compute.manager [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 929.775171] env[62204]: DEBUG oslo.service.loopingcall [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.775367] env[62204]: DEBUG nova.compute.manager [-] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 929.775463] env[62204]: DEBUG nova.network.neutron [-] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 929.824645] env[62204]: DEBUG nova.network.neutron [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Successfully updated port: 4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 929.844754] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f9777c39-512c-4625-8333-c0683e0d4b13 tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "431e7b20-22d8-4742-9c47-cdf9ee08fb32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.186s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.951101] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 929.951908] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-40aaed6d-5a67-4c21-87c3-eeefa99bb9fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.960317] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 929.960317] env[62204]: value = "task-1200085" [ 929.960317] env[62204]: _type = "Task" [ 929.960317] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.968867] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200085, 'name': CloneVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.036153] env[62204]: DEBUG nova.network.neutron [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 930.203095] env[62204]: DEBUG nova.network.neutron [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Updating instance_info_cache with network_info: [{"id": "5f278903-c85c-4f50-82ed-edfb3fb819c1", "address": "fa:16:3e:af:4d:2c", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f278903-c8", "ovs_interfaceid": "5f278903-c85c-4f50-82ed-edfb3fb819c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.307116] env[62204]: DEBUG oslo_concurrency.lockutils [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "7b7032a8-8093-43fb-b2e2-c6308d96e819" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.307505] env[62204]: DEBUG oslo_concurrency.lockutils [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.308232] env[62204]: DEBUG nova.compute.manager [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Going to confirm migration 2 {{(pid=62204) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 930.329214] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.329367] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock 
"refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.329540] env[62204]: DEBUG nova.network.neutron [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 930.475122] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200085, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.513603] env[62204]: DEBUG nova.compute.manager [req-5b71b7b5-7dae-4d61-8bf8-7b30502a42ab req-daff833d-7b89-4b8f-8a97-2a177138fe05 service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Received event network-vif-deleted-ccf86a68-c525-4b8b-940f-b0a08f2d3831 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.513603] env[62204]: INFO nova.compute.manager [req-5b71b7b5-7dae-4d61-8bf8-7b30502a42ab req-daff833d-7b89-4b8f-8a97-2a177138fe05 service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Neutron deleted interface ccf86a68-c525-4b8b-940f-b0a08f2d3831; detaching it from the instance and deleting it from the info cache [ 930.513778] env[62204]: DEBUG nova.network.neutron [req-5b71b7b5-7dae-4d61-8bf8-7b30502a42ab req-daff833d-7b89-4b8f-8a97-2a177138fe05 service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.671180] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6dbbac-352a-4f2b-aa11-fb58d52dc4da {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.679345] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fd2bd2-2a25-41ac-accd-e1961f89abec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.710583] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "refresh_cache-4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.710902] env[62204]: DEBUG nova.compute.manager [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Instance network_info: |[{"id": "5f278903-c85c-4f50-82ed-edfb3fb819c1", "address": "fa:16:3e:af:4d:2c", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f278903-c8", "ovs_interfaceid": "5f278903-c85c-4f50-82ed-edfb3fb819c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 930.711698] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:4d:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f278903-c85c-4f50-82ed-edfb3fb819c1', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 930.719339] env[62204]: DEBUG oslo.service.loopingcall [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 930.719900] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9207dad-8cc8-4562-b731-359382faf627 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.722497] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 930.722739] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-922a63dd-d698-45bb-bd57-ca3e40c07abd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.742424] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65d201e-4b28-4b80-b518-c2c1155d935b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.747071] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 930.747071] env[62204]: value = "task-1200086" [ 930.747071] env[62204]: _type = "Task" [ 930.747071] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.757599] env[62204]: DEBUG nova.compute.provider_tree [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.763209] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200086, 'name': CreateVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.845841] env[62204]: DEBUG oslo_concurrency.lockutils [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.846058] env[62204]: DEBUG oslo_concurrency.lockutils [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.846249] env[62204]: DEBUG nova.network.neutron [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 930.846480] env[62204]: DEBUG nova.objects.instance [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lazy-loading 'info_cache' on Instance uuid 7b7032a8-8093-43fb-b2e2-c6308d96e819 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.865716] env[62204]: DEBUG nova.network.neutron [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 930.871453] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.872334] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.872670] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.872952] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.873240] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.875415] env[62204]: INFO nova.compute.manager [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Terminating instance [ 930.877550] env[62204]: DEBUG nova.compute.manager [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 930.877778] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 930.878751] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b97a84e-0360-4efb-8136-cb7672a264f7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.889707] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 930.889984] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d915d6ef-1eb5-4ab2-b988-e53b56f1d797 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.896734] env[62204]: DEBUG oslo_vmware.api [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 930.896734] env[62204]: value = "task-1200087" [ 930.896734] env[62204]: _type = "Task" [ 930.896734] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.905131] env[62204]: DEBUG oslo_vmware.api [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.918279] env[62204]: DEBUG nova.network.neutron [-] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.971843] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200085, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.015233] env[62204]: DEBUG nova.network.neutron [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updating instance_info_cache with network_info: [{"id": "4243893b-2fda-4a71-94f3-332643bceb52", "address": "fa:16:3e:5e:e9:38", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4243893b-2f", "ovs_interfaceid": "4243893b-2fda-4a71-94f3-332643bceb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.018156] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bca4a45d-1ed1-4dad-ab4a-f34d32bc7a57 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.029054] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb763d4-a837-4319-9828-fee6b7780c33 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.066873] env[62204]: DEBUG nova.compute.manager [req-5b71b7b5-7dae-4d61-8bf8-7b30502a42ab req-daff833d-7b89-4b8f-8a97-2a177138fe05 service nova] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Detach interface failed, port_id=ccf86a68-c525-4b8b-940f-b0a08f2d3831, reason: Instance 0a4a432d-a71a-4da7-be90-25dcec5a64c6 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 931.262024] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200086, 'name': CreateVM_Task, 'duration_secs': 0.487326} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.262024] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 931.262024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.262024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.262024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 931.262024] env[62204]: DEBUG nova.scheduler.client.report [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 931.263986] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4671d61c-fbc0-4caa-99e0-cec98fb8b4ae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.269263] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 931.269263] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52609653-63cc-3d5f-a742-902f7461df4b" [ 931.269263] env[62204]: _type = "Task" [ 931.269263] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.277452] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52609653-63cc-3d5f-a742-902f7461df4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.408703] env[62204]: DEBUG oslo_vmware.api [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200087, 'name': PowerOffVM_Task, 'duration_secs': 0.321697} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.408703] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.408703] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 931.408703] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e378197b-42cf-4eb5-a7ab-ac2fb1095729 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.421379] env[62204]: INFO nova.compute.manager [-] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Took 1.65 seconds to deallocate network for instance. [ 931.471709] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200085, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.518344] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.518796] env[62204]: DEBUG nova.compute.manager [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Instance network_info: |[{"id": "4243893b-2fda-4a71-94f3-332643bceb52", "address": "fa:16:3e:5e:e9:38", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4243893b-2f", "ovs_interfaceid": "4243893b-2fda-4a71-94f3-332643bceb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 931.519178] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:e9:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4243893b-2fda-4a71-94f3-332643bceb52', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.526912] env[62204]: DEBUG oslo.service.loopingcall [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 931.528307] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 931.528584] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 931.528815] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 931.529017] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Deleting the datastore file [datastore1] 7412d7ef-b370-4253-8d57-d2bd5d06d6a9 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 931.529261] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-918c9089-8a33-4a13-83c0-ef5a4f066b84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.543762] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04f5d7f7-6b34-4337-afe4-8a0ed11c9902 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.552268] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 931.552268] env[62204]: value = "task-1200089" [ 931.552268] env[62204]: _type = "Task" [ 931.552268] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.554605] env[62204]: DEBUG oslo_vmware.api [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for the task: (returnval){ [ 931.554605] env[62204]: value = "task-1200090" [ 931.554605] env[62204]: _type = "Task" [ 931.554605] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.565375] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200089, 'name': CreateVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.568406] env[62204]: DEBUG oslo_vmware.api [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200090, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.766048] env[62204]: DEBUG nova.compute.manager [req-8023703d-ebc6-4880-b6f0-45d61d50a93d req-3fb59e05-1194-488c-b714-489cceb4c3d4 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received event network-changed-4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 931.766313] env[62204]: DEBUG nova.compute.manager [req-8023703d-ebc6-4880-b6f0-45d61d50a93d req-3fb59e05-1194-488c-b714-489cceb4c3d4 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing instance network info cache due to event network-changed-4243893b-2fda-4a71-94f3-332643bceb52. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 931.766579] env[62204]: DEBUG oslo_concurrency.lockutils [req-8023703d-ebc6-4880-b6f0-45d61d50a93d req-3fb59e05-1194-488c-b714-489cceb4c3d4 service nova] Acquiring lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.766712] env[62204]: DEBUG oslo_concurrency.lockutils [req-8023703d-ebc6-4880-b6f0-45d61d50a93d req-3fb59e05-1194-488c-b714-489cceb4c3d4 service nova] Acquired lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.766878] env[62204]: DEBUG nova.network.neutron [req-8023703d-ebc6-4880-b6f0-45d61d50a93d req-3fb59e05-1194-488c-b714-489cceb4c3d4 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing network info cache for port 4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 931.768723] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.769221] env[62204]: DEBUG nova.compute.manager [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 931.772295] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.131s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.773747] env[62204]: INFO nova.compute.claims [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.787035] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.787035] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Processing image a9e952fa-67fa-4a49-a75c-594f33aa3496 {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 931.787193] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496/a9e952fa-67fa-4a49-a75c-594f33aa3496.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.787193] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496/a9e952fa-67fa-4a49-a75c-594f33aa3496.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.787375] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 931.787629] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e867cec-8cda-41a2-b2b4-a8cc348df01b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.800535] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 931.800724] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Folder 
[datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 931.801745] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-797b6e9f-a313-4409-a38d-0bcad071b0ea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.807264] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 931.807264] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260d677-37d1-c5b4-4ac5-c7ecc68d65be" [ 931.807264] env[62204]: _type = "Task" [ 931.807264] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.817292] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260d677-37d1-c5b4-4ac5-c7ecc68d65be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.928321] env[62204]: DEBUG oslo_concurrency.lockutils [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.971373] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200085, 'name': CloneVM_Task, 'duration_secs': 1.678691} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.971759] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Created linked-clone VM from snapshot [ 931.972566] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-627c0287-623b-4462-a761-4a4d3d9dedfa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.980054] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Uploading image 64aeea2b-e127-4ab2-abff-027b5881ee9a {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 932.007331] env[62204]: DEBUG oslo_vmware.rw_handles [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 932.007331] env[62204]: value = "vm-260101" [ 932.007331] env[62204]: _type = "VirtualMachine" [ 932.007331] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 932.007636] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-28fe70be-c5f6-4d57-a0e1-33e67e78366c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.014047] env[62204]: DEBUG oslo_vmware.rw_handles [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lease: (returnval){ [ 932.014047] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c2e27-598c-4944-a9e5-e855f88dc9cb" [ 932.014047] env[62204]: _type = "HttpNfcLease" [ 932.014047] env[62204]: } obtained for exporting VM: (result){ [ 932.014047] env[62204]: value = "vm-260101" [ 932.014047] env[62204]: _type = "VirtualMachine" [ 932.014047] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 932.014382] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the lease: (returnval){ [ 932.014382] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c2e27-598c-4944-a9e5-e855f88dc9cb" [ 932.014382] env[62204]: _type = "HttpNfcLease" [ 932.014382] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 932.020501] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 932.020501] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c2e27-598c-4944-a9e5-e855f88dc9cb" [ 932.020501] env[62204]: _type = "HttpNfcLease" [ 932.020501] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 932.058538] env[62204]: DEBUG nova.network.neutron [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance_info_cache with network_info: [{"id": "830a7992-393b-4d36-82d8-b660d6904ae7", "address": "fa:16:3e:01:7a:45", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap830a7992-39", "ovs_interfaceid": "830a7992-393b-4d36-82d8-b660d6904ae7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.066594] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200089, 'name': CreateVM_Task, 'duration_secs': 0.340827} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.067071] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 932.067963] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.068091] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.068518] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 932.072098] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae671dc5-e6c7-47e6-8223-19ea0e6d0226 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.073704] env[62204]: DEBUG oslo_vmware.api [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Task: {'id': task-1200090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175983} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.074306] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 932.074593] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 932.074794] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 932.074980] env[62204]: INFO nova.compute.manager [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Took 1.20 seconds to destroy the instance on the hypervisor. [ 932.075238] env[62204]: DEBUG oslo.service.loopingcall [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.075762] env[62204]: DEBUG nova.compute.manager [-] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 932.075859] env[62204]: DEBUG nova.network.neutron [-] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 932.078757] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 932.078757] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c6df85-d6ae-4cdf-313b-cf624d451fa6" [ 932.078757] env[62204]: _type = "Task" [ 932.078757] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.086484] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c6df85-d6ae-4cdf-313b-cf624d451fa6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.282372] env[62204]: DEBUG nova.compute.utils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 932.284147] env[62204]: DEBUG nova.compute.manager [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 932.284419] env[62204]: DEBUG nova.network.neutron [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 932.323125] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Preparing fetch location {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 932.324087] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Fetch image to [datastore1] OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9/OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9.vmdk {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 932.324087] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Downloading stream optimized image a9e952fa-67fa-4a49-a75c-594f33aa3496 to [datastore1] OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9/OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9.vmdk on the data store datastore1 as vApp {{(pid=62204) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 932.324087] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Downloading image file data a9e952fa-67fa-4a49-a75c-594f33aa3496 to the ESX as VM named 'OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9' {{(pid=62204) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 932.361052] env[62204]: DEBUG nova.policy [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b52997d8756d4096b3dcba62f0bd14b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e70013d6da84d2b9a0719621c9f2c1a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 932.395299] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 932.395299] env[62204]: value = "resgroup-9" [ 932.395299] env[62204]: _type = "ResourcePool" [ 932.395299] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 932.395611] env[62204]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-c377fb8f-c918-4305-a2e3-42812bbc93ce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.418585] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lease: (returnval){ [ 932.418585] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260bddf-732d-a6e5-30c9-c46685246eb8" [ 932.418585] env[62204]: _type = "HttpNfcLease" [ 932.418585] env[62204]: } obtained for vApp import into resource pool (val){ [ 932.418585] env[62204]: value = "resgroup-9" [ 932.418585] env[62204]: _type = "ResourcePool" [ 932.418585] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 932.419030] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the lease: (returnval){ [ 932.419030] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260bddf-732d-a6e5-30c9-c46685246eb8" [ 932.419030] env[62204]: _type = "HttpNfcLease" [ 932.419030] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 932.425105] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 932.425105] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260bddf-732d-a6e5-30c9-c46685246eb8" [ 932.425105] env[62204]: _type = "HttpNfcLease" [ 932.425105] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 932.521522] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 932.521522] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c2e27-598c-4944-a9e5-e855f88dc9cb" [ 932.521522] env[62204]: _type = "HttpNfcLease" [ 932.521522] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 932.521999] env[62204]: DEBUG oslo_vmware.rw_handles [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 932.521999] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]529c2e27-598c-4944-a9e5-e855f88dc9cb" [ 932.521999] env[62204]: _type = "HttpNfcLease" [ 932.521999] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 932.522583] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577cf642-8d78-4bcc-84c0-2e1c368b2ade {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.529512] env[62204]: DEBUG oslo_vmware.rw_handles [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cbfb95-eb03-770d-20af-04c6b9ce9a98/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 932.529689] env[62204]: DEBUG oslo_vmware.rw_handles [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cbfb95-eb03-770d-20af-04c6b9ce9a98/disk-0.vmdk for reading. {{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 932.585896] env[62204]: DEBUG oslo_concurrency.lockutils [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-7b7032a8-8093-43fb-b2e2-c6308d96e819" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.586205] env[62204]: DEBUG nova.objects.instance [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lazy-loading 'migration_context' on Instance uuid 7b7032a8-8093-43fb-b2e2-c6308d96e819 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.601259] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c6df85-d6ae-4cdf-313b-cf624d451fa6, 'name': SearchDatastore_Task, 'duration_secs': 0.008776} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.601614] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.601887] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 932.602167] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.602344] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.602535] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 932.602822] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b67ee971-ebc3-44ce-bccc-554b986cbcfe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.611496] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 932.611770] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 932.612570] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c7cd6bb-879d-43b9-a7d9-7f0b3010abb5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.618764] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 932.618764] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a82f27-9ecd-793e-680f-77fc82a97456" [ 932.618764] env[62204]: _type = "Task" [ 932.618764] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.628452] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a82f27-9ecd-793e-680f-77fc82a97456, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.631142] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-53337c52-9689-408c-8d0e-e8631e9e2074 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.785047] env[62204]: DEBUG nova.compute.manager [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 932.804562] env[62204]: DEBUG nova.network.neutron [req-8023703d-ebc6-4880-b6f0-45d61d50a93d req-3fb59e05-1194-488c-b714-489cceb4c3d4 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updated VIF entry in instance network info cache for port 4243893b-2fda-4a71-94f3-332643bceb52. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 932.804562] env[62204]: DEBUG nova.network.neutron [req-8023703d-ebc6-4880-b6f0-45d61d50a93d req-3fb59e05-1194-488c-b714-489cceb4c3d4 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updating instance_info_cache with network_info: [{"id": "4243893b-2fda-4a71-94f3-332643bceb52", "address": "fa:16:3e:5e:e9:38", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4243893b-2f", "ovs_interfaceid": "4243893b-2fda-4a71-94f3-332643bceb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.818971] env[62204]: DEBUG nova.network.neutron [-] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.929048] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 932.929048] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260bddf-732d-a6e5-30c9-c46685246eb8" [ 932.929048] env[62204]: _type = "HttpNfcLease" [ 932.929048] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 933.091230] env[62204]: DEBUG nova.objects.base [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Object Instance<7b7032a8-8093-43fb-b2e2-c6308d96e819> lazy-loaded attributes: info_cache,migration_context {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 933.092620] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d4cfe9-54bc-4e5f-9d5e-606ae9579bca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.119030] env[62204]: DEBUG nova.network.neutron [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Successfully created port: 444f8f64-f9a6-4e48-ba55-fe1b6be68af2 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.125656] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59d8dcdd-d4b6-4a31-8c19-04d5e3927846 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.139921] env[62204]: DEBUG oslo_vmware.api [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 933.139921] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f4ddb4-70cd-0bf4-5f68-19acfdffc328" [ 933.139921] env[62204]: _type = "Task" [ 933.139921] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.146395] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a82f27-9ecd-793e-680f-77fc82a97456, 'name': SearchDatastore_Task, 'duration_secs': 0.008672} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.156296] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78a921a8-5254-4b90-a97a-16e350bee381 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.168242] env[62204]: DEBUG oslo_vmware.api [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f4ddb4-70cd-0bf4-5f68-19acfdffc328, 'name': SearchDatastore_Task, 'duration_secs': 0.007141} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.173835] env[62204]: DEBUG oslo_concurrency.lockutils [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.174453] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 933.174453] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520619b1-575b-59de-75dc-f4714e8d445d" [ 933.174453] env[62204]: _type = "Task" [ 933.174453] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.188320] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520619b1-575b-59de-75dc-f4714e8d445d, 'name': SearchDatastore_Task, 'duration_secs': 0.009295} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.193161] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.193698] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 7671c77f-3da8-4a41-a472-138c7bd23a92/7671c77f-3da8-4a41-a472-138c7bd23a92.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 933.194537] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f54e5276-a73c-4dd2-bc72-41406ca4a94e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.202860] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 933.202860] env[62204]: value = "task-1200093" [ 933.202860] env[62204]: _type = "Task" [ 933.202860] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.214486] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200093, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.219997] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a10b01e-a0e7-4b6b-9eea-e084910a676b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.227616] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc925f3-dfb9-4e88-843d-422fffc6a63d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.259668] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d99568-32c6-4809-af82-71f08b045169 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.268019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f739a0c-a8b6-46e6-8e87-c2d54cbc2551 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.282453] env[62204]: DEBUG nova.compute.provider_tree [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.308489] env[62204]: DEBUG oslo_concurrency.lockutils [req-8023703d-ebc6-4880-b6f0-45d61d50a93d req-3fb59e05-1194-488c-b714-489cceb4c3d4 service nova] Releasing lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.326716] env[62204]: INFO nova.compute.manager [-] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Took 1.25 seconds to deallocate network for instance. [ 933.428580] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 933.428580] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260bddf-732d-a6e5-30c9-c46685246eb8" [ 933.428580] env[62204]: _type = "HttpNfcLease" [ 933.428580] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 933.429020] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 933.429020] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260bddf-732d-a6e5-30c9-c46685246eb8" [ 933.429020] env[62204]: _type = "HttpNfcLease" [ 933.429020] env[62204]: }. 
{{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 933.429916] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5155c36e-d33f-4e16-bbd6-4458aecd0065 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.438900] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d5e6d-fa27-c3e0-40b3-ce3758b25998/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 933.439276] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d5e6d-fa27-c3e0-40b3-ce3758b25998/disk-0.vmdk. {{(pid=62204) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 933.509554] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9d8b8d55-0798-4574-a405-67b1192aefd4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.721571] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200093, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474722} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.721969] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 7671c77f-3da8-4a41-a472-138c7bd23a92/7671c77f-3da8-4a41-a472-138c7bd23a92.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.722129] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.722736] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f58a34c-713d-4290-beea-97bc869acf88 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.730258] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 933.730258] env[62204]: value = "task-1200094" [ 933.730258] env[62204]: _type = "Task" [ 933.730258] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.742304] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200094, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.787165] env[62204]: DEBUG nova.scheduler.client.report [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 933.798108] env[62204]: DEBUG nova.compute.manager [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 933.833677] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 933.860110] env[62204]: DEBUG nova.compute.manager [req-ead0cefe-d4ca-4c1e-bb5f-a77b0f557f1a req-eaff4a7d-4cd9-4a3c-b863-8b11c22f80a4 service nova] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Received event network-vif-deleted-71f7fdd0-1ed7-463d-86be-055c23851225 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 933.862416] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 933.864031] env[62204]: DEBUG nova.virt.hardware [None 
req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 933.864031] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 933.864031] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 933.864031] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 933.864031] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 933.864031] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 933.864393] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 933.864393] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 933.864465] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 933.864589] env[62204]: DEBUG nova.virt.hardware [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 933.866077] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89a24bd-326b-42ee-88b1-3c40709a14e6 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.886812] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a84aea-d88a-4f06-9d9e-7695c05d9219 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.084815] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquiring lock "f445a8ea-ff21-44e9-8389-231a03c51650" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.084815] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "f445a8ea-ff21-44e9-8389-231a03c51650" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.085025] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquiring lock "f445a8ea-ff21-44e9-8389-231a03c51650-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.085064] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "f445a8ea-ff21-44e9-8389-231a03c51650-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.086030] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "f445a8ea-ff21-44e9-8389-231a03c51650-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.088268] env[62204]: INFO nova.compute.manager [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Terminating instance [ 934.092829] env[62204]: DEBUG nova.compute.manager [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 934.095839] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 934.095839] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9cb631-a66f-473d-bbd9-2fca43a91fec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.102629] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 934.103187] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2eaa2f8d-c6f7-46f9-a60d-8ac49c8f4fe3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.114492] env[62204]: DEBUG oslo_vmware.api [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 934.114492] env[62204]: value = "task-1200095" [ 934.114492] env[62204]: _type = "Task" [ 934.114492] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.124294] env[62204]: DEBUG oslo_vmware.api [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1200095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.244457] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200094, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098176} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.245328] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 934.246152] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ffcef0-4027-4573-835a-9f4f58973de0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.277668] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 7671c77f-3da8-4a41-a472-138c7bd23a92/7671c77f-3da8-4a41-a472-138c7bd23a92.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.283710] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c899055-7b2f-415a-b29e-c79128ec7892 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.302428] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Completed reading data from the image iterator. {{(pid=62204) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 934.302693] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d5e6d-fa27-c3e0-40b3-ce3758b25998/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 934.303563] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.531s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.304123] env[62204]: DEBUG nova.compute.manager [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 934.307682] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36714dfd-3801-40f2-8c51-8662bf36573b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.311193] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.405s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.311487] env[62204]: DEBUG nova.objects.instance [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lazy-loading 'resources' on Instance uuid 7a0e579d-38e7-4f04-bf4d-1076dfc3b374 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 934.317325] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d5e6d-fa27-c3e0-40b3-ce3758b25998/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 934.317325] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d5e6d-fa27-c3e0-40b3-ce3758b25998/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 934.317325] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-9aef628d-9c8a-4c72-972d-6f4b040abef0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.321202] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 934.321202] env[62204]: value = "task-1200096" [ 934.321202] env[62204]: _type = "Task" [ 934.321202] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.331256] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200096, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.508867] env[62204]: DEBUG oslo_vmware.rw_handles [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523d5e6d-fa27-c3e0-40b3-ce3758b25998/disk-0.vmdk. 
{{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 934.508867] env[62204]: INFO nova.virt.vmwareapi.images [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Downloaded image file data a9e952fa-67fa-4a49-a75c-594f33aa3496 [ 934.508867] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d62efb-fde8-4559-9fec-eaaa6b850809 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.526879] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1eb9e810-b694-4fc2-814e-b6b6755aa1e8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.623922] env[62204]: DEBUG nova.compute.manager [req-1b0e10c6-34f6-4a26-9013-20712548fb2e req-0a2322e6-790a-443c-844b-96299b6adf72 service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Received event network-vif-plugged-444f8f64-f9a6-4e48-ba55-fe1b6be68af2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 934.624258] env[62204]: DEBUG oslo_concurrency.lockutils [req-1b0e10c6-34f6-4a26-9013-20712548fb2e req-0a2322e6-790a-443c-844b-96299b6adf72 service nova] Acquiring lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.624485] env[62204]: DEBUG oslo_concurrency.lockutils [req-1b0e10c6-34f6-4a26-9013-20712548fb2e req-0a2322e6-790a-443c-844b-96299b6adf72 service nova] Lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.624603] env[62204]: DEBUG oslo_concurrency.lockutils [req-1b0e10c6-34f6-4a26-9013-20712548fb2e req-0a2322e6-790a-443c-844b-96299b6adf72 service nova] Lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.624803] env[62204]: DEBUG nova.compute.manager [req-1b0e10c6-34f6-4a26-9013-20712548fb2e req-0a2322e6-790a-443c-844b-96299b6adf72 service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] No waiting events found dispatching network-vif-plugged-444f8f64-f9a6-4e48-ba55-fe1b6be68af2 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 934.624983] env[62204]: WARNING nova.compute.manager [req-1b0e10c6-34f6-4a26-9013-20712548fb2e req-0a2322e6-790a-443c-844b-96299b6adf72 service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Received unexpected event network-vif-plugged-444f8f64-f9a6-4e48-ba55-fe1b6be68af2 for instance with vm_state building and task_state spawning. 
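Annotation: the network-vif-plugged handling above (pop the per-instance event, find no waiter, log the "Received unexpected event" warning) can be modelled with a small registry keyed by (instance, event name). The following is a simplified stand-in, not Nova's InstanceEvents class.

```python
# Simplified model of the event flow above: a waiter registers interest in
# "network-vif-plugged-<port>", and the external-event handler pops and
# signals it; if nobody registered, the event is "unexpected".
import threading

_events = {}           # (instance_uuid, event_name) -> threading.Event
_events_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    with _events_lock:
        return _events.setdefault((instance_uuid, event_name),
                                  threading.Event())


def dispatch_event(instance_uuid, event_name):
    with _events_lock:
        ev = _events.pop((instance_uuid, event_name), None)
    if ev is None:
        print("Received unexpected event", event_name, "for", instance_uuid)
    else:
        ev.set()        # wake up whoever called prepare_for_event().wait()
```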
[ 934.627402] env[62204]: INFO nova.virt.vmwareapi.images [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] The imported VM was unregistered [ 934.629433] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Caching image {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 934.629675] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating directory with path [datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496 {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 934.633461] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d7d9ef5-6794-46ec-b6ed-42345d9b3f91 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.635904] env[62204]: DEBUG oslo_vmware.api [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1200095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.659036] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Created directory with path [datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496 {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 934.659268] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9/OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9.vmdk to [datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496/a9e952fa-67fa-4a49-a75c-594f33aa3496.vmdk. {{(pid=62204) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 934.659533] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-81fa35fd-e591-425e-b127-ac9894993ec8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.666124] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 934.666124] env[62204]: value = "task-1200098" [ 934.666124] env[62204]: _type = "Task" [ 934.666124] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.676176] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200098, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.720875] env[62204]: DEBUG nova.network.neutron [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Successfully updated port: 444f8f64-f9a6-4e48-ba55-fe1b6be68af2 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 934.813598] env[62204]: DEBUG nova.compute.utils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 934.815145] env[62204]: DEBUG nova.compute.manager [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 934.815697] env[62204]: DEBUG nova.network.neutron [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 935.634400] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "refresh_cache-9cf3ca28-443f-4e06-9f04-103b5b6cddd4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 935.638455] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "refresh_cache-9cf3ca28-443f-4e06-9f04-103b5b6cddd4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.638693] env[62204]: DEBUG nova.network.neutron [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 935.656453] env[62204]: DEBUG oslo_vmware.api [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1200095, 'name': PowerOffVM_Task, 'duration_secs': 1.014065} completed successfully. 
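Annotation: the "refresh_cache-<uuid>" acquire/acquired pair above corresponds to oslo.concurrency's context-manager lock form. A sketch under that assumption; `fetch_nw_info` is a hypothetical stand-in for the Neutron query Nova performs while holding the lock.

```python
# Sketch of the refresh_cache-<uuid> locking seen above; illustrative only.
from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, fetch_nw_info, cache):
    with lockutils.lock("refresh_cache-" + instance_uuid):
        # Only one worker rebuilds the network info cache for a given
        # instance at a time; concurrent refreshes block on the same name.
        cache[instance_uuid] = fetch_nw_info(instance_uuid)
    return cache[instance_uuid]
```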
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.660111] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 935.660350] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 935.660489] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200096, 'name': ReconfigVM_Task, 'duration_secs': 0.515673} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.664037] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e4fd4fb-67a2-4ee5-ad73-7df087f66f87 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.665928] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 7671c77f-3da8-4a41-a472-138c7bd23a92/7671c77f-3da8-4a41-a472-138c7bd23a92.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 935.666772] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200098, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.669571] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-524ac7a6-2db1-426e-b74f-015e05451746 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.677293] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 935.677293] env[62204]: value = "task-1200099" [ 935.677293] env[62204]: _type = "Task" [ 935.677293] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.685991] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200099, 'name': Rename_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.691654] env[62204]: DEBUG nova.policy [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6054f141cad7421f85bbb5944f408070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6907df6f17b142c0b4881f15f3b88a9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 935.750555] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 935.750786] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 935.750972] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Deleting the datastore file [datastore2] f445a8ea-ff21-44e9-8389-231a03c51650 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.751309] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4f26939-e7e2-41f1-8fa4-81ce4b841ca0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.764965] env[62204]: DEBUG oslo_vmware.api [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for the task: (returnval){ [ 935.764965] env[62204]: value = "task-1200101" [ 935.764965] env[62204]: _type = "Task" [ 935.764965] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.777888] env[62204]: DEBUG oslo_vmware.api [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1200101, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.989892] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7239a979-2dd1-4d73-bb52-dfc550054082 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.004645] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6638602b-75a3-42e1-a13b-3c66f4aa0a61 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.043024] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa9b498-98ca-4325-b963-c0c2ff0bc587 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.054301] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7c2613-5a98-4551-a583-e6d219e0a95a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.074333] env[62204]: DEBUG nova.compute.provider_tree [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.134434] env[62204]: DEBUG nova.compute.manager [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 936.155598] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200098, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.199212] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200099, 'name': Rename_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.225501] env[62204]: DEBUG nova.network.neutron [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 936.279360] env[62204]: DEBUG oslo_vmware.api [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Task: {'id': task-1200101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287995} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.279752] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.279947] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.280152] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.280540] env[62204]: INFO nova.compute.manager [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Took 2.19 seconds to destroy the instance on the hypervisor. [ 936.281014] env[62204]: DEBUG oslo.service.loopingcall [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
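Annotation: the oslo.service loopingcall entry above waits for _deallocate_network_with_retries to return. The general retry-until-done shape is sketched below; FixedIntervalLoopingCall and the one-second interval are illustrative assumptions, and Nova's exact helper may differ.

```python
# Retry-until-done sketch of the "Waiting for function ... to return" line.
from oslo_service import loopingcall


def deallocate_with_retries(deallocate, max_attempts=3):
    attempt = 0

    def _try_once():
        nonlocal attempt
        attempt += 1
        try:
            deallocate()
        except Exception:
            if attempt >= max_attempts:
                raise                        # give up; wait() re-raises this
            return                           # retry on the next interval
        raise loopingcall.LoopingCallDone()  # success: stop the loop

    loopingcall.FixedIntervalLoopingCall(_try_once).start(interval=1).wait()
```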
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 936.281459] env[62204]: DEBUG nova.compute.manager [-] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 936.281611] env[62204]: DEBUG nova.network.neutron [-] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 936.304189] env[62204]: DEBUG nova.network.neutron [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Successfully created port: 8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 936.578229] env[62204]: DEBUG nova.scheduler.client.report [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.666624] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200098, 'name': MoveVirtualDisk_Task} progress is 74%. 
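Annotation: the inventory reported above turns into schedulable capacity via the usual Placement arithmetic, capacity = (total - reserved) * allocation_ratio. A quick check against the numbers in the log (sketch only).

```python
# Capacity implied by the inventory dict logged above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```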
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.685242] env[62204]: DEBUG nova.network.neutron [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Updating instance_info_cache with network_info: [{"id": "444f8f64-f9a6-4e48-ba55-fe1b6be68af2", "address": "fa:16:3e:87:a8:90", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap444f8f64-f9", "ovs_interfaceid": "444f8f64-f9a6-4e48-ba55-fe1b6be68af2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.707876] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200099, 'name': Rename_Task} progress is 99%. 
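Annotation: the instance_info_cache payload above carries the port, MAC and fixed-IP details the driver needs. A hypothetical helper (not a Nova API) showing where those fields sit in that structure.

```python
# Pull the useful fields out of one network_info entry shaped like the
# cache payload logged above.
def summarize_vif(vif):
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
    ]
    return {
        "port_id": vif["id"],           # e.g. 444f8f64-f9a6-4e48-ba55-fe1b6be68af2
        "mac": vif["address"],          # e.g. fa:16:3e:87:a8:90
        "fixed_ips": fixed_ips,         # e.g. ["192.168.128.3"]
        "ovs_interface": vif.get("ovs_interfaceid"),
    }
```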
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.083627] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.772s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.086674] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.575s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 937.087214] env[62204]: DEBUG nova.objects.instance [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lazy-loading 'resources' on Instance uuid 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 937.115903] env[62204]: INFO nova.scheduler.client.report [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted allocations for instance 7a0e579d-38e7-4f04-bf4d-1076dfc3b374 [ 937.154126] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200098, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.154409] env[62204]: DEBUG nova.compute.manager [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 937.187272] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 937.189025] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 937.189025] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 937.189025] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 937.189025] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 937.189025] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 937.189025] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 937.189025] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 937.189025] env[62204]: DEBUG nova.virt.hardware [None 
req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 937.189025] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 937.189507] env[62204]: DEBUG nova.virt.hardware [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 937.190433] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cf2d53-12a2-4ec7-92ca-16c69f39fb59 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.196283] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "refresh_cache-9cf3ca28-443f-4e06-9f04-103b5b6cddd4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 937.196655] env[62204]: DEBUG nova.compute.manager [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Instance network_info: |[{"id": "444f8f64-f9a6-4e48-ba55-fe1b6be68af2", "address": "fa:16:3e:87:a8:90", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap444f8f64-f9", "ovs_interfaceid": "444f8f64-f9a6-4e48-ba55-fe1b6be68af2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 937.196977] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200099, 'name': Rename_Task, 'duration_secs': 1.04036} completed successfully. 
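Annotation: the nova.virt.hardware lines above reduce a 1-vCPU flavor with effectively unbounded socket/core/thread limits to the single topology 1 socket / 1 core / 1 thread. A simplified enumeration that reproduces that result; it is not Nova's actual topology algorithm.

```python
# Enumerate (sockets, cores, threads) whose product equals the vCPU count
# and which respect the per-dimension limits; simplified illustration only.
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)


print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]
```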
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.197833] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:a8:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '444f8f64-f9a6-4e48-ba55-fe1b6be68af2', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 937.205952] env[62204]: DEBUG oslo.service.loopingcall [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 937.207385] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.211211] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 937.211473] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35b43853-be97-4302-b5b2-a9cb79fdd0ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.216033] env[62204]: DEBUG nova.compute.manager [req-bdc40083-6c73-44b7-ae94-7b6a7fcd41cb req-60061dc8-90a6-4267-86fb-701d6ca37e6b service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Received event network-changed-444f8f64-f9a6-4e48-ba55-fe1b6be68af2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 937.216209] env[62204]: DEBUG nova.compute.manager [req-bdc40083-6c73-44b7-ae94-7b6a7fcd41cb req-60061dc8-90a6-4267-86fb-701d6ca37e6b service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Refreshing instance network info cache due to event network-changed-444f8f64-f9a6-4e48-ba55-fe1b6be68af2. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 937.216460] env[62204]: DEBUG oslo_concurrency.lockutils [req-bdc40083-6c73-44b7-ae94-7b6a7fcd41cb req-60061dc8-90a6-4267-86fb-701d6ca37e6b service nova] Acquiring lock "refresh_cache-9cf3ca28-443f-4e06-9f04-103b5b6cddd4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 937.216615] env[62204]: DEBUG oslo_concurrency.lockutils [req-bdc40083-6c73-44b7-ae94-7b6a7fcd41cb req-60061dc8-90a6-4267-86fb-701d6ca37e6b service nova] Acquired lock "refresh_cache-9cf3ca28-443f-4e06-9f04-103b5b6cddd4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.216801] env[62204]: DEBUG nova.network.neutron [req-bdc40083-6c73-44b7-ae94-7b6a7fcd41cb req-60061dc8-90a6-4267-86fb-701d6ca37e6b service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Refreshing network info cache for port 444f8f64-f9a6-4e48-ba55-fe1b6be68af2 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 937.219879] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6b38b51-505d-4576-aaff-41f47c7b8a1b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.237022] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b140e55e-0900-4563-9d38-a5356ff92084 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.244307] env[62204]: DEBUG nova.compute.manager [req-b378c897-71be-41c8-9c58-4fb4df0da709 req-5923bf86-1d48-47a4-be92-b84dce2c71f5 service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Received event network-vif-deleted-454e3072-3434-44df-b410-3e41abc2baca {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 937.244307] env[62204]: INFO nova.compute.manager [req-b378c897-71be-41c8-9c58-4fb4df0da709 req-5923bf86-1d48-47a4-be92-b84dce2c71f5 service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Neutron deleted interface 454e3072-3434-44df-b410-3e41abc2baca; detaching it from the instance and deleting it from the info cache [ 937.244307] env[62204]: DEBUG nova.network.neutron [req-b378c897-71be-41c8-9c58-4fb4df0da709 req-5923bf86-1d48-47a4-be92-b84dce2c71f5 service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.248395] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 937.248395] env[62204]: value = "task-1200102" [ 937.248395] env[62204]: _type = "Task" [ 937.248395] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.262869] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 937.262869] env[62204]: value = "task-1200103" [ 937.262869] env[62204]: _type = "Task" [ 937.262869] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.270378] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200102, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.276265] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200103, 'name': CreateVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.289363] env[62204]: DEBUG nova.network.neutron [-] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.625846] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96b8506-2501-4912-ab8b-859da2a2890a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "7a0e579d-38e7-4f04-bf4d-1076dfc3b374" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.901s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 937.651225] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200098, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.631224} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.651225] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9/OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9.vmdk to [datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496/a9e952fa-67fa-4a49-a75c-594f33aa3496.vmdk. 
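Annotation: once the imported disk lands in devstack-image-cache_base, the following lines clean up the staging directory and copy the cached VMDK into the instance's own folder. A hypothetical helper showing the datastore path convention used in those operations; the directory names come straight from the log, the function itself is not part of Nova.

```python
# Path convention for the per-image cache and the per-instance copy.
def vmware_image_paths(datastore, image_id, instance_uuid):
    cache_vmdk = (
        f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    )
    instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    return cache_vmdk, instance_vmdk


# Matches the MoveVirtualDisk / CopyVirtualDisk paths logged around here.
print(vmware_image_paths("datastore1",
                         "a9e952fa-67fa-4a49-a75c-594f33aa3496",
                         "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a"))
```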
[ 937.651678] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Cleaning up location [datastore1] OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9 {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 937.652045] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_8108cad3-d3e4-4dec-9718-7835762336c9 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.652475] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a345f9cd-a10f-41e9-be6d-13314c23ea44 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.663241] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 937.663241] env[62204]: value = "task-1200104" [ 937.663241] env[62204]: _type = "Task" [ 937.663241] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.675908] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200104, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.746587] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c81e69d-1ab2-4d38-bb97-8b9e085a05f6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.757104] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b61573-bf15-4d6b-b4af-bb6e5b8c7b80 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.793385] env[62204]: INFO nova.compute.manager [-] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Took 1.51 seconds to deallocate network for instance. [ 937.800759] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200103, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.822400] env[62204]: DEBUG nova.compute.manager [req-b378c897-71be-41c8-9c58-4fb4df0da709 req-5923bf86-1d48-47a4-be92-b84dce2c71f5 service nova] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Detach interface failed, port_id=454e3072-3434-44df-b410-3e41abc2baca, reason: Instance f445a8ea-ff21-44e9-8389-231a03c51650 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 937.822623] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200102, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.997595] env[62204]: DEBUG nova.network.neutron [req-bdc40083-6c73-44b7-ae94-7b6a7fcd41cb req-60061dc8-90a6-4267-86fb-701d6ca37e6b service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Updated VIF entry in instance network info cache for port 444f8f64-f9a6-4e48-ba55-fe1b6be68af2. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 937.998058] env[62204]: DEBUG nova.network.neutron [req-bdc40083-6c73-44b7-ae94-7b6a7fcd41cb req-60061dc8-90a6-4267-86fb-701d6ca37e6b service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Updating instance_info_cache with network_info: [{"id": "444f8f64-f9a6-4e48-ba55-fe1b6be68af2", "address": "fa:16:3e:87:a8:90", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap444f8f64-f9", "ovs_interfaceid": "444f8f64-f9a6-4e48-ba55-fe1b6be68af2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.031865] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69db8df9-6690-4797-bc88-7e4ba5153742 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.041121] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36c8009-5f76-448b-8af5-d7344320cd20 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.072674] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6180a80e-de0c-4380-b859-9ab642cd653a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.080728] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60242bc7-a30f-432c-a4bd-44ec87f05e09 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.094423] env[62204]: DEBUG nova.compute.provider_tree [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.176519] env[62204]: DEBUG oslo_vmware.api [None 
req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200104, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.042392} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.176802] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.176996] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496/a9e952fa-67fa-4a49-a75c-594f33aa3496.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.177279] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496/a9e952fa-67fa-4a49-a75c-594f33aa3496.vmdk to [datastore1] 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a/4fd0c913-8344-4fb9-91ad-f8ab64c6e89a.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 938.177699] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f485db6-660c-4822-9654-951c38b8e337 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.184068] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 938.184068] env[62204]: value = "task-1200105" [ 938.184068] env[62204]: _type = "Task" [ 938.184068] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.191748] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200105, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.289107] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200103, 'name': CreateVM_Task, 'duration_secs': 0.648737} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.292142] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 938.292477] env[62204]: DEBUG oslo_vmware.api [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200102, 'name': PowerOnVM_Task, 'duration_secs': 0.643409} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.293159] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.293332] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.293665] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 938.293996] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.294262] env[62204]: INFO nova.compute.manager [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Took 8.93 seconds to spawn the instance on the hypervisor. [ 938.294448] env[62204]: DEBUG nova.compute.manager [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 938.294716] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47421113-5a40-40e7-907d-05290e737156 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.296941] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52715f53-78cb-4a4a-b72f-a0d91900943d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.302859] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 938.302859] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5246b971-646c-45b7-70c5-e34a03bf612f" [ 938.302859] env[62204]: _type = "Task" [ 938.302859] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.315466] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5246b971-646c-45b7-70c5-e34a03bf612f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.327840] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.466482] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 938.466785] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 938.502386] env[62204]: DEBUG oslo_concurrency.lockutils [req-bdc40083-6c73-44b7-ae94-7b6a7fcd41cb req-60061dc8-90a6-4267-86fb-701d6ca37e6b service nova] Releasing lock "refresh_cache-9cf3ca28-443f-4e06-9f04-103b5b6cddd4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.598516] env[62204]: DEBUG nova.scheduler.client.report [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 938.697527] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200105, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.811656] env[62204]: DEBUG nova.network.neutron [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Successfully updated port: 8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.823265] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5246b971-646c-45b7-70c5-e34a03bf612f, 'name': SearchDatastore_Task, 'duration_secs': 0.019978} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.824684] env[62204]: INFO nova.compute.manager [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Took 22.40 seconds to build instance. [ 938.825707] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 938.829022] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.829022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 938.829022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.829022] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 938.829022] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f4c0ad2-f7a5-4d8b-9a87-f01cadfddc62 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
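The entries above repeat the two library patterns that account for most of this section: the oslo.concurrency lock lifecycle around the devstack image cache ("Acquiring lock ... / Acquired lock ... / Releasing lock ...") and the oslo.vmware task handling behind every CopyVirtualDisk_Task, CreateVM_Task and PowerOnVM_Task entry ("Waiting for the task ... / progress is N% / completed successfully"). The following is only a minimal sketch of how a caller drives both, assuming a placeholder vCenter host, credentials and VM managed-object reference; none of these values come from this run.

# Illustrative sketch, not log output: oslo.concurrency locking plus
# oslo.vmware task polling, mirroring the DEBUG lines above.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# VMwareAPISession is the object that logs "Successfully established new
# session"; retry count and poll interval here are assumptions.
session = vmware_api.VMwareAPISession(
    'vc1.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=1.0)

# Placeholder managed-object reference for a VM.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Entering and leaving the context manager produces the
# Acquiring/Acquired/Releasing entries emitted from lockutils.py;
# the lock name mirrors the image-cache locks seen above.
with lockutils.lock('[datastore1] devstack-image-cache_base/<image-id>'):
    # invoke_api() issues the SOAP call (logged by request_handler in
    # oslo_vmware/service.py) and returns a task reference;
    # wait_for_task() then polls the task state, which is what appears
    # above as the "_poll_task ... progress is N%" entries, and returns
    # once vCenter reports success (or raises on error).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

wait_for_task() is what turns the asynchronous vCenter task into the synchronous "progress is N% ... completed successfully" sequence seen throughout this section. The instance_info_cache payloads logged by update_instance_cache_with_nw_info (for ports 444f8f64-f9a6-... and 8d2819e6-83ce-...) are plain JSON; a trimmed, illustrative way to pull out the fields that matter (port id, MAC, devname, fixed IPs) from one of them:

# Illustrative only: a cut-down copy of the VIF entry logged for instance
# 9cf3ca28-443f-4e06-9f04-103b5b6cddd4 above; the cache holds a JSON list
# with one element per port.
import json

raw = '''[{"id": "444f8f64-f9a6-4e48-ba55-fe1b6be68af2",
           "address": "fa:16:3e:87:a8:90",
           "devname": "tap444f8f64-f9",
           "active": true,
           "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135",
                       "bridge": "br-int",
                       "subnets": [{"cidr": "192.168.128.0/28",
                                    "ips": [{"address": "192.168.128.3",
                                             "type": "fixed"}]}]}}]'''

for vif in json.loads(raw):
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], ips)
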
[ 938.847332] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 938.847634] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 938.848661] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-883e4fb2-fea6-4005-87d7-08f563de491a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.856636] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 938.856636] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e90da1-5740-985a-315a-bc8e39fefcad" [ 938.856636] env[62204]: _type = "Task" [ 938.856636] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.866455] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e90da1-5740-985a-315a-bc8e39fefcad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.969445] env[62204]: DEBUG nova.compute.manager [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 939.104071] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.017s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.106671] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.578s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.109131] env[62204]: INFO nova.compute.claims [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.127171] env[62204]: INFO nova.scheduler.client.report [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Deleted allocations for instance 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1 [ 939.195438] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200105, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.320016] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.320016] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.320016] env[62204]: DEBUG nova.network.neutron [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 939.328793] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2a538238-6416-4188-b9ec-36737a95c3f1 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "7671c77f-3da8-4a41-a472-138c7bd23a92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.913s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.375993] env[62204]: DEBUG oslo_vmware.api [None 
req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e90da1-5740-985a-315a-bc8e39fefcad, 'name': SearchDatastore_Task, 'duration_secs': 0.082851} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.375993] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75e613d9-f86d-463a-903f-bd3918d2f791 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.381546] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 939.381546] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522b2ba7-ce24-438c-d6cb-bb4cfa0e2b03" [ 939.381546] env[62204]: _type = "Task" [ 939.381546] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.391216] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522b2ba7-ce24-438c-d6cb-bb4cfa0e2b03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.421455] env[62204]: DEBUG nova.compute.manager [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Received event network-vif-plugged-8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 939.421757] env[62204]: DEBUG oslo_concurrency.lockutils [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] Acquiring lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.421983] env[62204]: DEBUG oslo_concurrency.lockutils [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.423142] env[62204]: DEBUG oslo_concurrency.lockutils [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.423395] env[62204]: DEBUG nova.compute.manager [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] No waiting events found dispatching network-vif-plugged-8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 939.423600] env[62204]: WARNING nova.compute.manager [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Received unexpected event network-vif-plugged-8d2819e6-83ce-46a3-80c6-ee04624e7556 for instance with vm_state building and task_state spawning. [ 939.423785] env[62204]: DEBUG nova.compute.manager [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Received event network-changed-8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 939.423991] env[62204]: DEBUG nova.compute.manager [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Refreshing instance network info cache due to event network-changed-8d2819e6-83ce-46a3-80c6-ee04624e7556. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 939.424201] env[62204]: DEBUG oslo_concurrency.lockutils [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] Acquiring lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.427999] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.428231] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.491764] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.636305] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f766e4e4-7ca5-4779-ad0b-5a00ec4392ee tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.020s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.697012] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200105, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.876650] env[62204]: DEBUG nova.network.neutron [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 939.894666] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522b2ba7-ce24-438c-d6cb-bb4cfa0e2b03, 'name': SearchDatastore_Task, 'duration_secs': 0.084374} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.895079] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 939.895495] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 9cf3ca28-443f-4e06-9f04-103b5b6cddd4/9cf3ca28-443f-4e06-9f04-103b5b6cddd4.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.895835] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f439d76b-fe71-48bc-9518-51b8bfc6d1bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.903026] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 939.903026] env[62204]: value = "task-1200106" [ 939.903026] env[62204]: _type = "Task" [ 939.903026] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.919688] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200106, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.931475] env[62204]: DEBUG nova.compute.manager [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 939.943497] env[62204]: DEBUG oslo_concurrency.lockutils [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "2b728904-19ef-4773-9260-c615da522801" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.943680] env[62204]: DEBUG oslo_concurrency.lockutils [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "2b728904-19ef-4773-9260-c615da522801" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.943860] env[62204]: DEBUG oslo_concurrency.lockutils [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "2b728904-19ef-4773-9260-c615da522801-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.944060] env[62204]: DEBUG oslo_concurrency.lockutils [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "2b728904-19ef-4773-9260-c615da522801-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.944233] env[62204]: DEBUG oslo_concurrency.lockutils [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "2b728904-19ef-4773-9260-c615da522801-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.947197] env[62204]: INFO nova.compute.manager [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Terminating instance [ 939.950576] env[62204]: DEBUG nova.compute.manager [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 939.952057] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.952730] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43905e1-2bfa-49a2-b189-5074b77384e4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.967123] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.967123] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c319835b-0821-4f7f-9f2c-6403743a9b29 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.976246] env[62204]: DEBUG oslo_vmware.api [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 939.976246] env[62204]: value = "task-1200107" [ 939.976246] env[62204]: _type = "Task" [ 939.976246] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.993810] env[62204]: DEBUG oslo_vmware.api [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200107, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.196917] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200105, 'name': CopyVirtualDisk_Task} progress is 80%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.305669] env[62204]: DEBUG nova.network.neutron [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Updating instance_info_cache with network_info: [{"id": "8d2819e6-83ce-46a3-80c6-ee04624e7556", "address": "fa:16:3e:a0:dd:91", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d2819e6-83", "ovs_interfaceid": "8d2819e6-83ce-46a3-80c6-ee04624e7556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.417394] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200106, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.455844] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 940.488634] env[62204]: DEBUG oslo_vmware.api [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200107, 'name': PowerOffVM_Task, 'duration_secs': 0.395618} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.489059] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.489306] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 940.492827] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15b2448d-721c-45c1-b576-d8fb686e10ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.526186] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ce7832-ce7f-488e-8a49-419f7a0124b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.538601] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad45d08-2845-4f2d-95ef-2739fef3ff2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.584641] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29abbca-7901-4679-87ad-c09598c4772f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.589203] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.589294] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.589464] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Deleting the datastore file [datastore2] 2b728904-19ef-4773-9260-c615da522801 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.590664] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6fb23a0-2efd-4511-bc1e-aab1f0809ff7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.596636] env[62204]: DEBUG oslo_vmware.api [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for the task: (returnval){ [ 940.596636] env[62204]: value = "task-1200109" [ 940.596636] 
env[62204]: _type = "Task" [ 940.596636] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.603930] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7769020a-2f4f-4ae3-9137-69747e18885b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.620796] env[62204]: DEBUG nova.compute.provider_tree [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.625579] env[62204]: DEBUG oslo_vmware.api [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200109, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.696702] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200105, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.4685} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.697071] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9e952fa-67fa-4a49-a75c-594f33aa3496/a9e952fa-67fa-4a49-a75c-594f33aa3496.vmdk to [datastore1] 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a/4fd0c913-8344-4fb9-91ad-f8ab64c6e89a.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 940.697909] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b08e01-dc5c-40b5-9129-0671b1e0eee4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.721409] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a/4fd0c913-8344-4fb9-91ad-f8ab64c6e89a.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 940.721790] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec022082-f106-407d-b56c-c4af8ad96d5d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.742161] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 940.742161] env[62204]: value = "task-1200110" [ 940.742161] env[62204]: _type = "Task" [ 940.742161] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.750676] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200110, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.812321] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.812837] env[62204]: DEBUG nova.compute.manager [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Instance network_info: |[{"id": "8d2819e6-83ce-46a3-80c6-ee04624e7556", "address": "fa:16:3e:a0:dd:91", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d2819e6-83", "ovs_interfaceid": "8d2819e6-83ce-46a3-80c6-ee04624e7556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 940.813319] env[62204]: DEBUG oslo_concurrency.lockutils [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] Acquired lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.813574] env[62204]: DEBUG nova.network.neutron [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Refreshing network info cache for port 8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 940.815316] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:dd:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'8d2819e6-83ce-46a3-80c6-ee04624e7556', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.823616] env[62204]: DEBUG oslo.service.loopingcall [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 940.827569] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.828295] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0970a44b-5650-4653-be04-1eb098af4646 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.856571] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.856571] env[62204]: value = "task-1200111" [ 940.856571] env[62204]: _type = "Task" [ 940.856571] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.867689] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200111, 'name': CreateVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.926092] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200106, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.110611] env[62204]: DEBUG oslo_vmware.api [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Task: {'id': task-1200109, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.364309} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.110952] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.111369] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.111637] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.111866] env[62204]: INFO nova.compute.manager [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] [instance: 2b728904-19ef-4773-9260-c615da522801] Took 1.16 seconds to destroy the instance on the hypervisor. [ 941.112158] env[62204]: DEBUG oslo.service.loopingcall [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 941.112482] env[62204]: DEBUG nova.compute.manager [-] [instance: 2b728904-19ef-4773-9260-c615da522801] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 941.112679] env[62204]: DEBUG nova.network.neutron [-] [instance: 2b728904-19ef-4773-9260-c615da522801] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 941.129418] env[62204]: DEBUG nova.scheduler.client.report [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 941.239815] env[62204]: DEBUG nova.network.neutron [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Updated VIF entry in instance network info cache for port 8d2819e6-83ce-46a3-80c6-ee04624e7556. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 941.240275] env[62204]: DEBUG nova.network.neutron [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Updating instance_info_cache with network_info: [{"id": "8d2819e6-83ce-46a3-80c6-ee04624e7556", "address": "fa:16:3e:a0:dd:91", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d2819e6-83", "ovs_interfaceid": "8d2819e6-83ce-46a3-80c6-ee04624e7556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.252209] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200110, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.369931] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200111, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.416877] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200106, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.155782} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.417184] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 9cf3ca28-443f-4e06-9f04-103b5b6cddd4/9cf3ca28-443f-4e06-9f04-103b5b6cddd4.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 941.417436] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 941.417715] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3226c010-857f-4b60-a36d-9dc45f591070 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.423938] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 941.423938] env[62204]: value = "task-1200112" [ 941.423938] env[62204]: _type = "Task" [ 941.423938] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.431909] env[62204]: DEBUG nova.compute.manager [req-14e9ae5d-6918-4d49-9437-2402c234fb33 req-db45a0db-32f6-464b-9e4d-48168b467361 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Received event network-vif-deleted-07d4d455-7847-4302-9485-d456629057e7 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 941.432193] env[62204]: INFO nova.compute.manager [req-14e9ae5d-6918-4d49-9437-2402c234fb33 req-db45a0db-32f6-464b-9e4d-48168b467361 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Neutron deleted interface 07d4d455-7847-4302-9485-d456629057e7; detaching it from the instance and deleting it from the info cache [ 941.432395] env[62204]: DEBUG nova.network.neutron [req-14e9ae5d-6918-4d49-9437-2402c234fb33 req-db45a0db-32f6-464b-9e4d-48168b467361 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.440036] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200112, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.467110] env[62204]: DEBUG nova.compute.manager [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 941.467627] env[62204]: DEBUG nova.compute.manager [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing instance network info cache due to event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 941.467740] env[62204]: DEBUG oslo_concurrency.lockutils [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] Acquiring lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.467977] env[62204]: DEBUG oslo_concurrency.lockutils [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] Acquired lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.468262] env[62204]: DEBUG nova.network.neutron [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 941.638642] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.532s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.639393] env[62204]: DEBUG nova.compute.manager [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 941.642606] env[62204]: DEBUG oslo_concurrency.lockutils [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.714s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.642909] env[62204]: DEBUG oslo_concurrency.lockutils [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.645394] env[62204]: DEBUG oslo_concurrency.lockutils [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.472s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.679974] env[62204]: INFO nova.scheduler.client.report [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted allocations for instance 0a4a432d-a71a-4da7-be90-25dcec5a64c6 [ 941.749860] env[62204]: DEBUG oslo_concurrency.lockutils [req-d230556e-4551-431f-b28a-447d3b5109e2 req-335095e6-f902-4ee1-8aac-1e017efbd8cc service nova] Releasing lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 941.756143] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200110, 'name': ReconfigVM_Task, 'duration_secs': 0.524212} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.757124] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a/4fd0c913-8344-4fb9-91ad-f8ab64c6e89a.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.758289] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21c361fd-ab1e-4fc2-81bc-f93ff0297b81 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.764767] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 941.764767] env[62204]: value = "task-1200113" [ 941.764767] env[62204]: _type = "Task" [ 941.764767] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.773800] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200113, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.867814] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200111, 'name': CreateVM_Task, 'duration_secs': 0.618938} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.868068] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 941.868590] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.868766] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.869099] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 941.869365] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e07e7e53-ecfd-4de1-b457-2bbfd28ed610 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.875032] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 941.875032] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52000bf4-7e38-c3db-c54c-3fd6572961d7" [ 941.875032] env[62204]: _type = "Task" [ 941.875032] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.884401] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52000bf4-7e38-c3db-c54c-3fd6572961d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.886790] env[62204]: DEBUG nova.network.neutron [-] [instance: 2b728904-19ef-4773-9260-c615da522801] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.934338] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200112, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076173} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.934613] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 941.935783] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7e0177-d0b3-40e9-a5d3-c92b6d5dfaac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.938484] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15372fa7-9a39-4cf0-9d8d-e2ec944d8ef7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.965215] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 9cf3ca28-443f-4e06-9f04-103b5b6cddd4/9cf3ca28-443f-4e06-9f04-103b5b6cddd4.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.966630] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de6f3ade-7d16-4ea7-8d60-e7cfbe38e652 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.988143] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0bfbc7-b64a-4f8d-977d-c020f8a561ae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.005570] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 942.005570] env[62204]: value = "task-1200114" [ 942.005570] env[62204]: _type = "Task" [ 942.005570] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.024612] env[62204]: DEBUG nova.compute.manager [req-14e9ae5d-6918-4d49-9437-2402c234fb33 req-db45a0db-32f6-464b-9e4d-48168b467361 service nova] [instance: 2b728904-19ef-4773-9260-c615da522801] Detach interface failed, port_id=07d4d455-7847-4302-9485-d456629057e7, reason: Instance 2b728904-19ef-4773-9260-c615da522801 could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 942.028443] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200114, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.153436] env[62204]: DEBUG nova.compute.utils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 942.159589] env[62204]: DEBUG nova.compute.manager [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 942.159589] env[62204]: DEBUG nova.network.neutron [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 942.193185] env[62204]: DEBUG oslo_concurrency.lockutils [None req-137be325-8116-4e9f-b919-2c91c171acc3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "0a4a432d-a71a-4da7-be90-25dcec5a64c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.540s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.229489] env[62204]: DEBUG nova.policy [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '052e8b58b8554c02a492ef696d6057bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56643ee7896c48bf9be3dd1cb1c9fc80', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 942.232309] env[62204]: DEBUG nova.network.neutron [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updated VIF entry in instance network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 942.232410] env[62204]: DEBUG nova.network.neutron [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.276014] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200113, 'name': Rename_Task, 'duration_secs': 0.268641} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.276437] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.276544] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44b26f4d-1e1b-4ad2-9883-875681fc4a4d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.286201] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 942.286201] env[62204]: value = "task-1200115" [ 942.286201] env[62204]: _type = "Task" [ 942.286201] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.298392] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200115, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.390798] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52000bf4-7e38-c3db-c54c-3fd6572961d7, 'name': SearchDatastore_Task, 'duration_secs': 0.017569} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.391165] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.391410] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.391652] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.391803] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.391980] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.392270] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53715c27-6d06-4e39-bae0-e7c0df0fe5a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.397490] env[62204]: INFO nova.compute.manager [-] [instance: 2b728904-19ef-4773-9260-c615da522801] Took 1.28 seconds to deallocate network for instance. 
[ 942.411661] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.411661] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.412131] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1ed4515-8336-44e6-be62-b9da6c5af9e6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.417881] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 942.417881] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52856163-61db-7fd7-52a2-9c36932eefc4" [ 942.417881] env[62204]: _type = "Task" [ 942.417881] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.426150] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52856163-61db-7fd7-52a2-9c36932eefc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.518252] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200114, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.554408] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed9946c-dcd8-4819-b509-a5b7f2ddaa34 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.564277] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38057ee-9352-489d-b021-ea7482c4a2f6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.606124] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81204d7-1cd7-428e-ba6a-78ad032a72e4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.614119] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d702ade-870b-4fa2-a876-c8b74c701b6c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.627641] env[62204]: DEBUG nova.compute.provider_tree [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.658924] env[62204]: DEBUG nova.compute.manager [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 942.736525] env[62204]: DEBUG oslo_concurrency.lockutils [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] Releasing lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.737365] env[62204]: DEBUG nova.compute.manager [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received event network-changed-4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 942.737365] env[62204]: DEBUG nova.compute.manager [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing instance network info cache due to event network-changed-4243893b-2fda-4a71-94f3-332643bceb52. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 942.737365] env[62204]: DEBUG oslo_concurrency.lockutils [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] Acquiring lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.737365] env[62204]: DEBUG oslo_concurrency.lockutils [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] Acquired lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.737577] env[62204]: DEBUG nova.network.neutron [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing network info cache for port 4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 942.795860] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200115, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.903394] env[62204]: DEBUG oslo_concurrency.lockutils [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.927138] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52856163-61db-7fd7-52a2-9c36932eefc4, 'name': SearchDatastore_Task, 'duration_secs': 0.036457} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.927970] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7063e6aa-8597-4dde-a0bb-20b92da3075c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.936075] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 942.936075] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52987af7-417f-af47-abb6-7b27308151b7" [ 942.936075] env[62204]: _type = "Task" [ 942.936075] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.944491] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52987af7-417f-af47-abb6-7b27308151b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.021723] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200114, 'name': ReconfigVM_Task, 'duration_secs': 0.813837} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.023760] env[62204]: DEBUG oslo_vmware.rw_handles [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cbfb95-eb03-770d-20af-04c6b9ce9a98/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 943.024205] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 9cf3ca28-443f-4e06-9f04-103b5b6cddd4/9cf3ca28-443f-4e06-9f04-103b5b6cddd4.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 943.025533] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9436c81e-02ef-4132-bbbd-dcc1cac09508 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.028752] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5ee2acb-ef3c-49c1-aef4-266e52d28d43 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.073066] env[62204]: DEBUG oslo_vmware.rw_handles [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cbfb95-eb03-770d-20af-04c6b9ce9a98/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 943.073066] env[62204]: ERROR oslo_vmware.rw_handles [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cbfb95-eb03-770d-20af-04c6b9ce9a98/disk-0.vmdk due to incomplete transfer. [ 943.073066] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8ea5269c-b410-4574-a0bf-60b8b9a8752e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.073066] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 943.073066] env[62204]: value = "task-1200116" [ 943.073066] env[62204]: _type = "Task" [ 943.073066] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.073066] env[62204]: DEBUG oslo_vmware.rw_handles [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52cbfb95-eb03-770d-20af-04c6b9ce9a98/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 943.073066] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Uploaded image 64aeea2b-e127-4ab2-abff-027b5881ee9a to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 943.073066] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 943.073066] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e3fd6f5f-9009-4e4b-98b0-b0a3f742d3f4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.073066] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200116, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.073066] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 943.073066] env[62204]: value = "task-1200117" [ 943.073066] env[62204]: _type = "Task" [ 943.073066] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.073066] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200117, 'name': Destroy_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.131339] env[62204]: DEBUG nova.scheduler.client.report [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 943.226253] env[62204]: DEBUG nova.network.neutron [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Successfully created port: 3874d0d3-36f6-4cab-a204-a05bf0fb54ac {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.296803] env[62204]: DEBUG oslo_vmware.api [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200115, 'name': PowerOnVM_Task, 'duration_secs': 0.780891} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.298037] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.298037] env[62204]: INFO nova.compute.manager [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Took 16.38 seconds to spawn the instance on the hypervisor. [ 943.298037] env[62204]: DEBUG nova.compute.manager [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 943.298415] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626bdb45-1ae9-488d-b179-94b0839dc516 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.449198] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52987af7-417f-af47-abb6-7b27308151b7, 'name': SearchDatastore_Task, 'duration_secs': 0.019953} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.449890] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.450225] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d43dafa1-770f-4455-a3d8-9d08742b1fb6/d43dafa1-770f-4455-a3d8-9d08742b1fb6.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.450551] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f69f4e9f-ea2f-4be0-8cd1-fb39d4be586f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.459384] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 943.459384] env[62204]: value = "task-1200118" [ 943.459384] env[62204]: _type = "Task" [ 943.459384] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.469873] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200118, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.501560] env[62204]: DEBUG nova.network.neutron [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updated VIF entry in instance network info cache for port 4243893b-2fda-4a71-94f3-332643bceb52. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 943.502015] env[62204]: DEBUG nova.network.neutron [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updating instance_info_cache with network_info: [{"id": "4243893b-2fda-4a71-94f3-332643bceb52", "address": "fa:16:3e:5e:e9:38", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4243893b-2f", "ovs_interfaceid": "4243893b-2fda-4a71-94f3-332643bceb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.547767] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200116, 'name': Rename_Task, 'duration_secs': 0.184936} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.548030] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 943.548284] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da131d0d-f5cb-48e4-b46d-e3c05d83dcaf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.554156] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 943.554156] env[62204]: value = "task-1200119" [ 943.554156] env[62204]: _type = "Task" [ 943.554156] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.569166] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200119, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.574471] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200117, 'name': Destroy_Task} progress is 33%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.670525] env[62204]: DEBUG nova.compute.manager [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 943.695513] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 943.695763] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 943.695922] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 943.696124] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 943.696277] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 943.696464] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 943.696923] env[62204]: DEBUG nova.virt.hardware [None 
req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 943.697151] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 943.697374] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 943.697607] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 943.697826] env[62204]: DEBUG nova.virt.hardware [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 943.698997] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd7cb91-292a-46d0-9877-7ef7802f6353 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.709860] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ffcc93e-b0b9-4a68-bd0d-59a7dc27bcaf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.747814] env[62204]: DEBUG nova.compute.manager [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received event network-changed-4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.748028] env[62204]: DEBUG nova.compute.manager [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing instance network info cache due to event network-changed-4243893b-2fda-4a71-94f3-332643bceb52. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 943.748240] env[62204]: DEBUG oslo_concurrency.lockutils [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] Acquiring lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.819516] env[62204]: INFO nova.compute.manager [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Took 35.11 seconds to build instance. [ 943.971082] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200118, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.005071] env[62204]: DEBUG oslo_concurrency.lockutils [req-100b3ecc-c61f-4693-838f-ca4cd898f870 req-d6a8c051-b6f2-4378-93c7-39eadab3465c service nova] Releasing lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.005626] env[62204]: DEBUG oslo_concurrency.lockutils [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] Acquired lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.005865] env[62204]: DEBUG nova.network.neutron [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing network info cache for port 4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 944.052999] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.053246] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.069157] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200119, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.073620] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200117, 'name': Destroy_Task, 'duration_secs': 0.784724} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.073914] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Destroyed the VM [ 944.074182] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 944.074461] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4088d53a-9618-440b-b097-a738e10ffae6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.082591] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 944.082591] env[62204]: value = "task-1200120" [ 944.082591] env[62204]: _type = "Task" [ 944.082591] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.091481] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200120, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.146581] env[62204]: DEBUG oslo_concurrency.lockutils [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.501s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.149869] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.316s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.150180] env[62204]: DEBUG nova.objects.instance [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lazy-loading 'resources' on Instance uuid 7412d7ef-b370-4253-8d57-d2bd5d06d6a9 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.322169] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe1397a2-6a6f-46a2-9da1-514d4425dd6e tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.615s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.435573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.435839] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.470936] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200118, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.904232} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.471252] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d43dafa1-770f-4455-a3d8-9d08742b1fb6/d43dafa1-770f-4455-a3d8-9d08742b1fb6.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.471475] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.471784] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e8181085-74b8-4d96-8afd-564ea2deb31a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.479065] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 944.479065] env[62204]: value = "task-1200121" [ 944.479065] env[62204]: _type = "Task" [ 944.479065] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.488106] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200121, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.557392] env[62204]: DEBUG nova.compute.utils [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 944.572409] env[62204]: DEBUG oslo_vmware.api [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200119, 'name': PowerOnVM_Task, 'duration_secs': 0.954452} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.572713] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 944.572974] env[62204]: INFO nova.compute.manager [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Took 10.77 seconds to spawn the instance on the hypervisor. 
[ 944.573203] env[62204]: DEBUG nova.compute.manager [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 944.573981] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9194d745-4dac-40a3-b42c-bee81b3259c9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.594205] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200120, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.698852] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.699125] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.699337] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.699524] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.699699] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.706105] env[62204]: INFO nova.compute.manager [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Terminating instance [ 944.712459] env[62204]: DEBUG nova.compute.manager [None 
req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 944.712683] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.713540] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e30df28-22c8-4da5-b517-95e798df0b29 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.717270] env[62204]: INFO nova.scheduler.client.report [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted allocation for migration 1ece4807-e6af-4d6e-b887-100f22af2351 [ 944.726809] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.727128] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-610c43d6-3a55-4df9-96d9-9125a7743e15 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.732398] env[62204]: DEBUG oslo_vmware.api [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 944.732398] env[62204]: value = "task-1200122" [ 944.732398] env[62204]: _type = "Task" [ 944.732398] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.742867] env[62204]: DEBUG oslo_vmware.api [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200122, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.750875] env[62204]: DEBUG nova.network.neutron [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updated VIF entry in instance network info cache for port 4243893b-2fda-4a71-94f3-332643bceb52. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 944.751317] env[62204]: DEBUG nova.network.neutron [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updating instance_info_cache with network_info: [{"id": "4243893b-2fda-4a71-94f3-332643bceb52", "address": "fa:16:3e:5e:e9:38", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4243893b-2f", "ovs_interfaceid": "4243893b-2fda-4a71-94f3-332643bceb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.937799] env[62204]: DEBUG nova.compute.manager [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 944.952911] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4e6911-72ac-4b7c-a9c9-1df59eb967b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.961695] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2041b1d3-7fb9-431c-8d01-9d233b4f044d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.996669] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74091670-9794-4d2c-8784-b2bc3ea51711 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.009122] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200121, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06982} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.010662] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7d2cde-c949-453d-a72f-df9e166f7911 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.014502] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.016402] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881746e5-fadc-4208-90c2-b75b5f3c69c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.029481] env[62204]: DEBUG nova.compute.provider_tree [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.048316] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] d43dafa1-770f-4455-a3d8-9d08742b1fb6/d43dafa1-770f-4455-a3d8-9d08742b1fb6.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.050090] env[62204]: DEBUG nova.scheduler.client.report [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 945.053441] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23aeef03-d2fb-4cce-807f-09dbc2a4976a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.070994] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.920s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.072810] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 
tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.019s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.073414] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.746s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.073669] env[62204]: DEBUG nova.objects.instance [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lazy-loading 'resources' on Instance uuid f445a8ea-ff21-44e9-8389-231a03c51650 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.083474] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 945.083474] env[62204]: value = "task-1200123" [ 945.083474] env[62204]: _type = "Task" [ 945.083474] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.103441] env[62204]: INFO nova.scheduler.client.report [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Deleted allocations for instance 7412d7ef-b370-4253-8d57-d2bd5d06d6a9 [ 945.103441] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200123, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.103659] env[62204]: INFO nova.compute.manager [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Took 23.82 seconds to build instance. [ 945.112391] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200120, 'name': RemoveSnapshot_Task, 'duration_secs': 0.967289} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.113522] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 945.113958] env[62204]: DEBUG nova.compute.manager [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 945.114800] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e355db6-744f-40ff-b318-64978a95603e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.137894] env[62204]: DEBUG nova.network.neutron [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Successfully updated port: 3874d0d3-36f6-4cab-a204-a05bf0fb54ac {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.228054] env[62204]: DEBUG oslo_concurrency.lockutils [None req-24b7817f-6420-4fbb-8adc-8d32c24d9c92 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 14.920s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.242098] env[62204]: DEBUG oslo_vmware.api [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200122, 'name': PowerOffVM_Task, 'duration_secs': 0.478851} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.242389] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.242568] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.242816] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-100fcd3d-57bb-409c-a953-62561e58387f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.253872] env[62204]: DEBUG oslo_concurrency.lockutils [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] Releasing lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.254130] env[62204]: DEBUG nova.compute.manager [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.254311] env[62204]: DEBUG nova.compute.manager [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing instance network info cache due to event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 945.254546] env[62204]: DEBUG oslo_concurrency.lockutils [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] Acquiring lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.254700] env[62204]: DEBUG oslo_concurrency.lockutils [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] Acquired lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.254867] env[62204]: DEBUG nova.network.neutron [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 945.330052] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.330453] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.330652] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleting the datastore file [datastore1] 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.330970] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5dad7e6-380d-4ee3-ab27-1f8e91824ec4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.338048] env[62204]: DEBUG oslo_vmware.api [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 945.338048] env[62204]: value = "task-1200125" [ 945.338048] env[62204]: _type = "Task" [ 945.338048] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.345332] env[62204]: DEBUG oslo_vmware.api [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200125, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.461416] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.598021] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200123, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.615791] env[62204]: DEBUG oslo_concurrency.lockutils [None req-50328354-8a39-4129-b0cb-67ecaeaa0b57 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.346s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.616854] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f400c066-8d2f-47e2-bee1-8178ae6aa2bd tempest-ListImageFiltersTestJSON-1462067876 tempest-ListImageFiltersTestJSON-1462067876-project-member] Lock "7412d7ef-b370-4253-8d57-d2bd5d06d6a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.745s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.630141] env[62204]: INFO nova.compute.manager [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Shelve offloading [ 945.631986] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.632288] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bab2c4c-9c86-4f0c-b445-435a5bcf948f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.642888] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "refresh_cache-21056adb-d81e-45bd-b354-1bcb488d2ed9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.644102] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired lock "refresh_cache-21056adb-d81e-45bd-b354-1bcb488d2ed9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.644102] env[62204]: DEBUG nova.network.neutron [None 
req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.645758] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 945.645758] env[62204]: value = "task-1200126" [ 945.645758] env[62204]: _type = "Task" [ 945.645758] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.664645] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 945.664865] env[62204]: DEBUG nova.compute.manager [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 945.666477] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e18653-0040-4e2d-b0f4-2d3b0ef29b4d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.672994] env[62204]: DEBUG oslo_concurrency.lockutils [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.673178] env[62204]: DEBUG oslo_concurrency.lockutils [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.674099] env[62204]: DEBUG nova.network.neutron [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 945.850203] env[62204]: DEBUG oslo_vmware.api [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289513} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.850497] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.850693] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.850878] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.851119] env[62204]: INFO nova.compute.manager [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 945.851374] env[62204]: DEBUG oslo.service.loopingcall [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.851569] env[62204]: DEBUG nova.compute.manager [-] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 945.851662] env[62204]: DEBUG nova.network.neutron [-] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 945.881032] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44464a54-5612-4494-be7f-da50c3bc7bca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.888609] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec20f9e-7cd1-42b1-b692-dd182da49634 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.925566] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7eeee3-3681-4884-adaf-6e28ff7f315c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.930506] env[62204]: DEBUG nova.compute.manager [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Received event network-vif-plugged-3874d0d3-36f6-4cab-a204-a05bf0fb54ac {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.930732] env[62204]: DEBUG oslo_concurrency.lockutils [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 
req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.930944] env[62204]: DEBUG oslo_concurrency.lockutils [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.931122] env[62204]: DEBUG oslo_concurrency.lockutils [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 945.931409] env[62204]: DEBUG nova.compute.manager [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] No waiting events found dispatching network-vif-plugged-3874d0d3-36f6-4cab-a204-a05bf0fb54ac {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 945.931473] env[62204]: WARNING nova.compute.manager [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Received unexpected event network-vif-plugged-3874d0d3-36f6-4cab-a204-a05bf0fb54ac for instance with vm_state building and task_state spawning. [ 945.931604] env[62204]: DEBUG nova.compute.manager [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Received event network-changed-3874d0d3-36f6-4cab-a204-a05bf0fb54ac {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.931759] env[62204]: DEBUG nova.compute.manager [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Refreshing instance network info cache due to event network-changed-3874d0d3-36f6-4cab-a204-a05bf0fb54ac. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 945.931924] env[62204]: DEBUG oslo_concurrency.lockutils [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] Acquiring lock "refresh_cache-21056adb-d81e-45bd-b354-1bcb488d2ed9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 945.942761] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01256359-3443-4acc-ab39-55d7f4b7c227 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.956658] env[62204]: DEBUG nova.compute.provider_tree [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.095724] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200123, 'name': ReconfigVM_Task, 'duration_secs': 0.729118} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.096061] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Reconfigured VM instance instance-0000005c to attach disk [datastore1] d43dafa1-770f-4455-a3d8-9d08742b1fb6/d43dafa1-770f-4455-a3d8-9d08742b1fb6.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.096735] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-546997fb-9cc6-4f96-bd2c-b394d06a600c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.103210] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 946.103210] env[62204]: value = "task-1200127" [ 946.103210] env[62204]: _type = "Task" [ 946.103210] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.111633] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200127, 'name': Rename_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.138954] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.139269] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.139517] env[62204]: INFO nova.compute.manager [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Attaching volume 6debc6c9-3775-46fa-b3ae-21b56913f95b to /dev/sdb [ 946.150657] env[62204]: DEBUG nova.network.neutron [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updated VIF entry in instance network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 946.151045] env[62204]: DEBUG nova.network.neutron [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.193082] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef19233-0afa-4ec6-8774-34af60884787 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.201861] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff68994b-b1de-4a4c-8a39-1adad37e4a08 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.215600] env[62204]: DEBUG nova.virt.block_device [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updating existing volume attachment record: 8fe6e861-982b-41f3-912a-80a585098798 {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 946.219832] env[62204]: DEBUG nova.network.neutron [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 946.229985] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "interface-274285e5-fc23-48b4-b0d6-5a67bc764d78-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.230275] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-274285e5-fc23-48b4-b0d6-5a67bc764d78-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.230654] env[62204]: DEBUG nova.objects.instance [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'flavor' on Instance uuid 274285e5-fc23-48b4-b0d6-5a67bc764d78 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.462681] env[62204]: DEBUG nova.scheduler.client.report [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.501238] env[62204]: DEBUG nova.compute.manager [req-a243616a-33b4-4aa1-ae16-04fabda202c7 req-ed3b1492-2624-4d16-9423-0e464e0d8d3e service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Received event network-vif-deleted-5f278903-c85c-4f50-82ed-edfb3fb819c1 {{(pid=62204) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 946.501238] env[62204]: INFO nova.compute.manager [req-a243616a-33b4-4aa1-ae16-04fabda202c7 req-ed3b1492-2624-4d16-9423-0e464e0d8d3e service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Neutron deleted interface 5f278903-c85c-4f50-82ed-edfb3fb819c1; detaching it from the instance and deleting it from the info cache [ 946.502052] env[62204]: DEBUG nova.network.neutron [req-a243616a-33b4-4aa1-ae16-04fabda202c7 req-ed3b1492-2624-4d16-9423-0e464e0d8d3e service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.507190] env[62204]: DEBUG nova.network.neutron [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Updating instance_info_cache with network_info: [{"id": "3874d0d3-36f6-4cab-a204-a05bf0fb54ac", "address": "fa:16:3e:0e:ce:ee", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3874d0d3-36", "ovs_interfaceid": "3874d0d3-36f6-4cab-a204-a05bf0fb54ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.579898] env[62204]: DEBUG nova.network.neutron [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating instance_info_cache with network_info: [{"id": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "address": "fa:16:3e:38:96:d6", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapcb48dbbb-64", "ovs_interfaceid": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.615834] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200127, 'name': Rename_Task, 'duration_secs': 0.225941} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.615834] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.615834] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-350dd034-7387-447c-989d-54223c9aa335 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.624691] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 946.624691] env[62204]: value = "task-1200130" [ 946.624691] env[62204]: _type = "Task" [ 946.624691] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.636251] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200130, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.657419] env[62204]: DEBUG oslo_concurrency.lockutils [req-f3100444-dd56-4e90-b891-2f46533e78e0 req-6c0c94a5-6340-4200-8808-a7e9dc0ab401 service nova] Releasing lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.763127] env[62204]: DEBUG nova.network.neutron [-] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.930843] env[62204]: DEBUG nova.objects.instance [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'pci_requests' on Instance uuid 274285e5-fc23-48b4-b0d6-5a67bc764d78 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.971675] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.898s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.974514] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.483s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.976579] env[62204]: INFO nova.compute.claims [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.000210] env[62204]: INFO nova.scheduler.client.report [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Deleted allocations for instance f445a8ea-ff21-44e9-8389-231a03c51650 [ 947.005620] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82bd7f97-1187-432a-8634-a7cd66cb697a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.009254] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Releasing lock "refresh_cache-21056adb-d81e-45bd-b354-1bcb488d2ed9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.009875] env[62204]: DEBUG nova.compute.manager [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Instance network_info: |[{"id": "3874d0d3-36f6-4cab-a204-a05bf0fb54ac", "address": "fa:16:3e:0e:ce:ee", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", 
"bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3874d0d3-36", "ovs_interfaceid": "3874d0d3-36f6-4cab-a204-a05bf0fb54ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 947.010982] env[62204]: DEBUG oslo_concurrency.lockutils [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] Acquired lock "refresh_cache-21056adb-d81e-45bd-b354-1bcb488d2ed9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.011749] env[62204]: DEBUG nova.network.neutron [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Refreshing network info cache for port 3874d0d3-36f6-4cab-a204-a05bf0fb54ac {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 947.012705] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:ce:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '415e68b4-3766-4359-afe2-f8563910d98c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3874d0d3-36f6-4cab-a204-a05bf0fb54ac', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 947.025855] env[62204]: DEBUG oslo.service.loopingcall [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 947.028315] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 947.028673] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e566c4fc-b2de-4ac4-94db-b96dfceac103 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.061374] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4114d972-aa17-4c02-8921-35f996da95ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.075148] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 947.075148] env[62204]: value = "task-1200132" [ 947.075148] env[62204]: _type = "Task" [ 947.075148] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.085492] env[62204]: DEBUG oslo_concurrency.lockutils [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 947.087654] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200132, 'name': CreateVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.100676] env[62204]: DEBUG nova.compute.manager [req-a243616a-33b4-4aa1-ae16-04fabda202c7 req-ed3b1492-2624-4d16-9423-0e464e0d8d3e service nova] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Detach interface failed, port_id=5f278903-c85c-4f50-82ed-edfb3fb819c1, reason: Instance 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 947.136234] env[62204]: DEBUG oslo_vmware.api [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200130, 'name': PowerOnVM_Task, 'duration_secs': 0.463548} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.137243] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.137243] env[62204]: INFO nova.compute.manager [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Took 9.98 seconds to spawn the instance on the hypervisor. 
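
[editor's note] The PowerOnVM_Task and CreateVM_Task entries above follow the usual oslo.vmware pattern: the driver submits an asynchronous vCenter task, then wait_for_task() polls it, which is what the repeated "Waiting for the task ... to complete" / "_poll_task ... progress is N%" / "completed successfully" lines record. The following is a minimal sketch of that pattern outside Nova (not the driver's own code); the vCenter host, credentials and VM managed-object value are placeholders.

    # Sketch of the asynchronous-task pattern seen in the log above.
    # Assumes a reachable vCenter; host/user/password/moref are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',          # placeholder vCenter host
        'administrator@vsphere.local',  # placeholder user
        'secret',                       # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)         # interval between the progress polls

    # Look up the VM by its managed-object value (placeholder) and power it on;
    # the call returns a Task moref immediately, like task-1200130 above.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Blocks, polling the task until it reaches a terminal state; returns the
    # final task_info on success and raises if the task ends in an error state.
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)

Because wait_for_task() either returns the completed task_info or raises, the log only ever shows a "completed successfully" line or a traceback for a given task id.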
[ 947.137243] env[62204]: DEBUG nova.compute.manager [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 947.137554] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14afaba-2007-42e2-abfe-6fdb30db9939 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.265443] env[62204]: INFO nova.compute.manager [-] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Took 1.41 seconds to deallocate network for instance. [ 947.433450] env[62204]: DEBUG nova.objects.base [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Object Instance<274285e5-fc23-48b4-b0d6-5a67bc764d78> lazy-loaded attributes: flavor,pci_requests {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 947.433773] env[62204]: DEBUG nova.network.neutron [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 947.436878] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.437742] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677cc4f3-9ef4-4752-b2b0-7b6fd4ce12a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.445466] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 947.445723] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1302282e-2c47-4e10-930a-5b033784d22c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.512062] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cef5fb9d-b07e-4544-bc2e-74957629675e tempest-ServersV294TestFqdnHostnames-886913025 tempest-ServersV294TestFqdnHostnames-886913025-project-member] Lock "f445a8ea-ff21-44e9-8389-231a03c51650" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.427s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.516687] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Unregistered the VM {{(pid=62204) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 947.516904] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 947.517110] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleting the datastore file [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 947.517367] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df1e2956-e602-4095-b955-a1f19c10e316 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.523834] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 947.523834] env[62204]: value = "task-1200134" [ 947.523834] env[62204]: _type = "Task" [ 947.523834] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.535626] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200134, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.545755] env[62204]: DEBUG nova.policy [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '478b22c814424575af79a8af808398a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81dc15a8604e4900845b79c75cc5ef16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 947.585343] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200132, 'name': CreateVM_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.654558] env[62204]: INFO nova.compute.manager [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Took 26.03 seconds to build instance. 
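
[editor's note] The lock lines that dominate this section come from two oslo.concurrency primitives: the 'Acquiring lock "..." by "..."' / 'acquired ... waited' / '"released" ... held' triplets are logged by the inner wrapper of the @lockutils.synchronized decorator (lockutils.py:402/407/421 in the entries above), while the 'Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>"' lines come from the lockutils.lock() context manager (lockutils.py:310/313/331). A minimal sketch of both, with illustrative names only:

    # Sketch of the two lockutils usages visible in the log; names are illustrative.
    import logging

    from oslo_concurrency import lockutils

    # Surface the DEBUG messages that lockutils emits, similar to the log above.
    logging.basicConfig(level=logging.DEBUG)


    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Critical section: one caller at a time per process, mirroring the
        # resource-tracker style "compute_resources" lock in the log.
        pass


    def refresh_network_cache(instance_uuid):
        # Per-instance cache refreshes take a dynamically named lock, as in the
        # 'refresh_cache-<uuid>' lines above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass


    update_usage()
    refresh_network_cache('21056adb-d81e-45bd-b354-1bcb488d2ed9')

The "waited N.NNNs" / "held N.NNNs" figures in the log are the timings taken around exactly these acquire/release points, which makes them a quick way to spot lock contention (e.g. the 7.483s wait on "compute_resources" earlier in this section).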
[ 947.695721] env[62204]: DEBUG oslo_concurrency.lockutils [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "7b7032a8-8093-43fb-b2e2-c6308d96e819" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.695942] env[62204]: DEBUG oslo_concurrency.lockutils [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.696168] env[62204]: DEBUG oslo_concurrency.lockutils [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "7b7032a8-8093-43fb-b2e2-c6308d96e819-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.696357] env[62204]: DEBUG oslo_concurrency.lockutils [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.696569] env[62204]: DEBUG oslo_concurrency.lockutils [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.698587] env[62204]: INFO nova.compute.manager [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Terminating instance [ 947.700292] env[62204]: DEBUG nova.compute.manager [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 947.700487] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.701562] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69186bb5-1941-4845-8f9e-de1a347c3cdf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.709800] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.710199] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab749ae8-4a3f-43a5-8de7-041c88f374e4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.716339] env[62204]: DEBUG oslo_vmware.api [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 947.716339] env[62204]: value = "task-1200135" [ 947.716339] env[62204]: _type = "Task" [ 947.716339] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.725629] env[62204]: DEBUG oslo_vmware.api [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.772047] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.006035] env[62204]: DEBUG nova.network.neutron [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Updated VIF entry in instance network info cache for port 3874d0d3-36f6-4cab-a204-a05bf0fb54ac. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 948.006696] env[62204]: DEBUG nova.network.neutron [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Updating instance_info_cache with network_info: [{"id": "3874d0d3-36f6-4cab-a204-a05bf0fb54ac", "address": "fa:16:3e:0e:ce:ee", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3874d0d3-36", "ovs_interfaceid": "3874d0d3-36f6-4cab-a204-a05bf0fb54ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.037766] env[62204]: DEBUG oslo_vmware.api [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200134, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.401978} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.038064] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.038273] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 948.039502] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 948.055136] env[62204]: INFO nova.scheduler.client.report [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleted allocations for instance c0990e53-70c9-4536-b26a-bc00bd457c56 [ 948.091185] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200132, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.156949] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b5f10f71-8fd3-48da-b82c-b88fb1c0d5ad tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.539s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.229033] env[62204]: DEBUG oslo_vmware.api [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200135, 'name': PowerOffVM_Task, 'duration_secs': 0.249951} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.229349] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.229533] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.229803] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e891a71-18ad-40ec-b328-8d804ed12739 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.269659] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2aa122-a6c9-4b52-ba50-c663d6b1d799 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.278243] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe0d468-d806-43cc-9275-8d68ce469239 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.312101] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143acbc6-be09-4036-b094-41552cc64fee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.321313] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c384c1a-9c66-4097-b762-e428f1036977 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.327072] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.327072] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-da73f3a1-e031-40d6-a144-2184fd514888 
tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.327226] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleting the datastore file [datastore2] 7b7032a8-8093-43fb-b2e2-c6308d96e819 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.327854] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06ed641e-5641-44bb-9811-d372c820abd3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.337856] env[62204]: DEBUG nova.compute.provider_tree [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.341278] env[62204]: DEBUG nova.compute.manager [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received event network-vif-unplugged-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.341494] env[62204]: DEBUG oslo_concurrency.lockutils [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] Acquiring lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.341701] env[62204]: DEBUG oslo_concurrency.lockutils [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.342475] env[62204]: DEBUG oslo_concurrency.lockutils [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 948.342475] env[62204]: DEBUG nova.compute.manager [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] No waiting events found dispatching network-vif-unplugged-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 948.342475] env[62204]: WARNING nova.compute.manager [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received unexpected event network-vif-unplugged-cb48dbbb-646f-445c-89d1-8c4a9e36de59 for instance 
with vm_state shelved_offloaded and task_state None. [ 948.342677] env[62204]: DEBUG nova.compute.manager [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received event network-changed-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.342871] env[62204]: DEBUG nova.compute.manager [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Refreshing instance network info cache due to event network-changed-cb48dbbb-646f-445c-89d1-8c4a9e36de59. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 948.343169] env[62204]: DEBUG oslo_concurrency.lockutils [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] Acquiring lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.343365] env[62204]: DEBUG oslo_concurrency.lockutils [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] Acquired lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.343606] env[62204]: DEBUG nova.network.neutron [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Refreshing network info cache for port cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 948.346635] env[62204]: DEBUG oslo_vmware.api [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 948.346635] env[62204]: value = "task-1200137" [ 948.346635] env[62204]: _type = "Task" [ 948.346635] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.357297] env[62204]: DEBUG oslo_vmware.api [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200137, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.509744] env[62204]: DEBUG oslo_concurrency.lockutils [req-8c87b119-a8f7-4130-9e2d-9ca431020c14 req-b1bb8b7d-7012-4549-9221-aaffb277b364 service nova] Releasing lock "refresh_cache-21056adb-d81e-45bd-b354-1bcb488d2ed9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.561935] env[62204]: DEBUG oslo_concurrency.lockutils [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.589847] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200132, 'name': CreateVM_Task, 'duration_secs': 1.268302} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.589847] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 948.590264] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.590438] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.590742] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 948.591741] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fe986d4-e769-4a90-bb7a-29daa9eeb7bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.595893] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 948.595893] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5266b8c6-81ec-f381-1a10-08534fb8d0ec" [ 948.595893] env[62204]: _type = "Task" [ 948.595893] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.604057] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5266b8c6-81ec-f381-1a10-08534fb8d0ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.708456] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "2178b629-4be6-473b-9a75-19efa234d442" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 948.708701] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 948.847607] env[62204]: DEBUG nova.scheduler.client.report [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 948.867483] env[62204]: DEBUG oslo_vmware.api [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.381111} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.867483] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.867483] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 948.867483] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 948.867483] env[62204]: INFO nova.compute.manager [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Took 1.16 seconds to destroy the instance on the hypervisor. [ 948.867483] env[62204]: DEBUG oslo.service.loopingcall [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 948.867483] env[62204]: DEBUG nova.compute.manager [-] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 948.867483] env[62204]: DEBUG nova.network.neutron [-] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 949.083411] env[62204]: DEBUG nova.network.neutron [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updated VIF entry in instance network info cache for port cb48dbbb-646f-445c-89d1-8c4a9e36de59. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 949.083787] env[62204]: DEBUG nova.network.neutron [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating instance_info_cache with network_info: [{"id": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "address": "fa:16:3e:38:96:d6", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapcb48dbbb-64", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.108199] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5266b8c6-81ec-f381-1a10-08534fb8d0ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009692} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.109245] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.109485] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 949.109760] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.109864] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.110137] env[62204]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 949.110655] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2066083-ca45-4484-8b76-b7476fcd77da {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.121694] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 949.121899] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 949.122663] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62b8c36f-d696-4a5b-a4c4-4f5a812dc6c9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.129032] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 949.129032] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ec2327-12bd-c493-a2cf-02e106b0d2a6" [ 949.129032] env[62204]: _type = "Task" [ 949.129032] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.137519] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ec2327-12bd-c493-a2cf-02e106b0d2a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.212800] env[62204]: DEBUG nova.compute.manager [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 949.226555] env[62204]: DEBUG nova.compute.manager [req-379e33ed-214d-4133-9b40-c387a72bebc7 req-0a6885aa-1c75-40ae-9b23-0158fcab05ec service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Received event network-changed-444f8f64-f9a6-4e48-ba55-fe1b6be68af2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 949.226555] env[62204]: DEBUG nova.compute.manager [req-379e33ed-214d-4133-9b40-c387a72bebc7 req-0a6885aa-1c75-40ae-9b23-0158fcab05ec service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Refreshing instance network info cache due to event network-changed-444f8f64-f9a6-4e48-ba55-fe1b6be68af2. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 949.226555] env[62204]: DEBUG oslo_concurrency.lockutils [req-379e33ed-214d-4133-9b40-c387a72bebc7 req-0a6885aa-1c75-40ae-9b23-0158fcab05ec service nova] Acquiring lock "refresh_cache-9cf3ca28-443f-4e06-9f04-103b5b6cddd4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 949.226713] env[62204]: DEBUG oslo_concurrency.lockutils [req-379e33ed-214d-4133-9b40-c387a72bebc7 req-0a6885aa-1c75-40ae-9b23-0158fcab05ec service nova] Acquired lock "refresh_cache-9cf3ca28-443f-4e06-9f04-103b5b6cddd4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.226879] env[62204]: DEBUG nova.network.neutron [req-379e33ed-214d-4133-9b40-c387a72bebc7 req-0a6885aa-1c75-40ae-9b23-0158fcab05ec service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Refreshing network info cache for port 444f8f64-f9a6-4e48-ba55-fe1b6be68af2 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 949.357679] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.358220] env[62204]: DEBUG nova.compute.manager [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 949.365119] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.906s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.366827] env[62204]: INFO nova.compute.claims [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.586622] env[62204]: DEBUG oslo_concurrency.lockutils [req-bd96484b-b9f5-483d-a3cc-841f79c9d1b5 req-0f848f92-bc6b-4ef6-9dd9-af40ae49aa46 service nova] Releasing lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.613807] env[62204]: DEBUG nova.network.neutron [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Successfully updated port: 5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.642459] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ec2327-12bd-c493-a2cf-02e106b0d2a6, 'name': SearchDatastore_Task, 'duration_secs': 0.009857} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.643616] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fb73db1-ceaf-430d-9eb3-cb3e716bd9a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.651920] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 949.651920] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ed2f3d-d432-5386-e2e5-215e2a513ff8" [ 949.651920] env[62204]: _type = "Task" [ 949.651920] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.668870] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ed2f3d-d432-5386-e2e5-215e2a513ff8, 'name': SearchDatastore_Task, 'duration_secs': 0.011745} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.669017] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.669288] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 21056adb-d81e-45bd-b354-1bcb488d2ed9/21056adb-d81e-45bd-b354-1bcb488d2ed9.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.669844] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-189d5a31-058a-4ac0-a416-888a4f9023a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.678108] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 949.678108] env[62204]: value = "task-1200139" [ 949.678108] env[62204]: _type = "Task" [ 949.678108] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.683015] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.684065] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.684297] env[62204]: INFO nova.compute.manager [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Shelving [ 949.690410] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200139, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.770296] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.776969] env[62204]: DEBUG nova.network.neutron [-] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.878910] env[62204]: DEBUG nova.compute.utils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 949.883775] env[62204]: DEBUG nova.compute.manager [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 949.884495] env[62204]: DEBUG nova.network.neutron [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 949.988626] env[62204]: DEBUG nova.policy [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6db3ba1bb9b9464d870969f1f7d95a9d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ef8dc436e4b45d0a8d50468666358e3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 950.089526] env[62204]: DEBUG nova.network.neutron [req-379e33ed-214d-4133-9b40-c387a72bebc7 req-0a6885aa-1c75-40ae-9b23-0158fcab05ec service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Updated VIF entry in instance network info cache for port 444f8f64-f9a6-4e48-ba55-fe1b6be68af2. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 950.090116] env[62204]: DEBUG nova.network.neutron [req-379e33ed-214d-4133-9b40-c387a72bebc7 req-0a6885aa-1c75-40ae-9b23-0158fcab05ec service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Updating instance_info_cache with network_info: [{"id": "444f8f64-f9a6-4e48-ba55-fe1b6be68af2", "address": "fa:16:3e:87:a8:90", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap444f8f64-f9", "ovs_interfaceid": "444f8f64-f9a6-4e48-ba55-fe1b6be68af2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.125408] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.125408] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.125408] env[62204]: DEBUG nova.network.neutron [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 950.190603] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200139, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.195620] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.195905] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4015b866-4c77-4357-8199-0cf450d39ae4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.206141] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 950.206141] env[62204]: value = "task-1200140" [ 950.206141] env[62204]: _type = "Task" [ 950.206141] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.217674] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200140, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.279587] env[62204]: INFO nova.compute.manager [-] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Took 1.41 seconds to deallocate network for instance. [ 950.388192] env[62204]: DEBUG nova.compute.manager [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 950.417533] env[62204]: DEBUG nova.compute.manager [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received event network-vif-plugged-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 950.417789] env[62204]: DEBUG oslo_concurrency.lockutils [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] Acquiring lock "274285e5-fc23-48b4-b0d6-5a67bc764d78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.417983] env[62204]: DEBUG oslo_concurrency.lockutils [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.421023] env[62204]: DEBUG oslo_concurrency.lockutils [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.421023] env[62204]: DEBUG nova.compute.manager [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] No waiting events found dispatching network-vif-plugged-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 950.421023] env[62204]: WARNING nova.compute.manager [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received unexpected event network-vif-plugged-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 for instance with vm_state active and task_state None. [ 950.421023] env[62204]: DEBUG nova.compute.manager [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received event network-changed-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 950.421023] env[62204]: DEBUG nova.compute.manager [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing instance network info cache due to event network-changed-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 950.421023] env[62204]: DEBUG oslo_concurrency.lockutils [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] Acquiring lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.594618] env[62204]: DEBUG oslo_concurrency.lockutils [req-379e33ed-214d-4133-9b40-c387a72bebc7 req-0a6885aa-1c75-40ae-9b23-0158fcab05ec service nova] Releasing lock "refresh_cache-9cf3ca28-443f-4e06-9f04-103b5b6cddd4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 950.696019] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200139, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652772} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.696910] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 21056adb-d81e-45bd-b354-1bcb488d2ed9/21056adb-d81e-45bd-b354-1bcb488d2ed9.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.696910] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.697515] env[62204]: WARNING nova.network.neutron [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] 3b20dcf5-a239-493a-bd84-9815cabea48a already exists in list: networks containing: ['3b20dcf5-a239-493a-bd84-9815cabea48a']. ignoring it [ 950.699750] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c54a57ae-2a0b-4df6-8a86-57a8ebd72b86 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.707748] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 950.707748] env[62204]: value = "task-1200141" [ 950.707748] env[62204]: _type = "Task" [ 950.707748] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.722548] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200140, 'name': PowerOffVM_Task, 'duration_secs': 0.338546} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.725519] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.725901] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200141, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.727692] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7adad4-1e56-4f5b-818c-26b1ee7d12e5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.753324] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b35612-9317-4627-b43f-337c5149ada3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.766450] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446421dd-9a82-430f-acb9-fb4b4846879f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.775780] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289617a7-c8e4-4bf5-b77c-c30f2a5d09ab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.782632] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Volume attach. 
Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 950.782866] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260108', 'volume_id': '6debc6c9-3775-46fa-b3ae-21b56913f95b', 'name': 'volume-6debc6c9-3775-46fa-b3ae-21b56913f95b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd97d792d-614f-42e3-8516-6c0a7cf15ad5', 'attached_at': '', 'detached_at': '', 'volume_id': '6debc6c9-3775-46fa-b3ae-21b56913f95b', 'serial': '6debc6c9-3775-46fa-b3ae-21b56913f95b'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 950.783719] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ce965d-1824-4c04-ae2a-2fcdf93ea4f3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.816295] env[62204]: DEBUG oslo_concurrency.lockutils [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 950.820150] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f73023-7231-4c43-8de0-2518ca563956 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.835650] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8af9d8-fb76-49df-9d59-f4c9b92b72ba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.842040] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19ba464-58a7-44aa-abff-8cb2d9490ada {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.866617] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] volume-6debc6c9-3775-46fa-b3ae-21b56913f95b/volume-6debc6c9-3775-46fa-b3ae-21b56913f95b.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.867344] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-553cd50a-dccd-45c4-beef-ef13d1b5303f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.891734] env[62204]: DEBUG nova.compute.provider_tree [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 
92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.901552] env[62204]: DEBUG oslo_vmware.api [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 950.901552] env[62204]: value = "task-1200142" [ 950.901552] env[62204]: _type = "Task" [ 950.901552] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.927128] env[62204]: DEBUG oslo_vmware.api [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200142, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.976952] env[62204]: DEBUG nova.network.neutron [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Successfully created port: ca6a6668-486c-47f3-bbb8-5902729c6304 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.191776] env[62204]: DEBUG nova.network.neutron [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38", "address": "fa:16:3e:e6:ae:0d", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c5577ec-23", "ovs_interfaceid": "5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.221561] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067927} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.221866] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 951.222807] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd4e1df-7bd1-42ef-94b1-b4888538f3d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.247064] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] 21056adb-d81e-45bd-b354-1bcb488d2ed9/21056adb-d81e-45bd-b354-1bcb488d2ed9.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 951.247884] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a03cad64-cb4a-4b05-9123-f431a9f17e3c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.264581] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 951.264968] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2c2e55e9-6760-4742-918a-f88d30fb3d05 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.268102] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 951.268102] env[62204]: value = "task-1200143" [ 951.268102] env[62204]: _type = "Task" [ 951.268102] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.272145] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 951.272145] env[62204]: value = "task-1200144" [ 951.272145] env[62204]: _type = "Task" [ 951.272145] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.278722] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200143, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.283508] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200144, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.395533] env[62204]: DEBUG nova.scheduler.client.report [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 951.403960] env[62204]: DEBUG nova.compute.manager [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 951.408668] env[62204]: DEBUG nova.compute.manager [req-0874aab5-2d4d-471d-9f52-517a871d5b79 req-262bdfec-2c1c-410f-9df6-f29d0cc50336 service nova] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Received event network-vif-deleted-830a7992-393b-4d36-82d8-b660d6904ae7 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 951.424681] env[62204]: DEBUG oslo_vmware.api [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200142, 'name': ReconfigVM_Task, 'duration_secs': 0.364136} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.425191] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfigured VM instance instance-00000055 to attach disk [datastore2] volume-6debc6c9-3775-46fa-b3ae-21b56913f95b/volume-6debc6c9-3775-46fa-b3ae-21b56913f95b.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.433632] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18e0d457-e99d-4b63-8b96-f63a8af5bd78 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.463872] env[62204]: DEBUG oslo_vmware.api [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 951.463872] env[62204]: value = "task-1200145" [ 951.463872] env[62204]: _type = "Task" [ 951.463872] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.469794] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 951.469794] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 951.469794] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.469794] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 951.469794] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 
tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.469794] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 951.469794] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 951.469794] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 951.469794] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 951.470206] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 951.470206] env[62204]: DEBUG nova.virt.hardware [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 951.472497] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eea5a7a-5225-4650-995a-d365a633d298 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.492239] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6fbbe1-76ac-4d56-a787-61eddd4410b2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.495992] env[62204]: DEBUG oslo_vmware.api [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200145, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.627382] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "c0990e53-70c9-4536-b26a-bc00bd457c56" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 951.696715] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.697645] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.697895] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.698235] env[62204]: DEBUG oslo_concurrency.lockutils [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] Acquired lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.698443] env[62204]: DEBUG nova.network.neutron [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing network info cache for port 5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 951.700483] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce55890-94b6-4c03-89e1-79610fd176a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.719873] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 951.720121] env[62204]: DEBUG nova.virt.hardware [None 
req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 951.720282] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.720462] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 951.720604] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.720746] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 951.720945] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 951.721118] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 951.721283] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 951.721451] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 951.721621] env[62204]: DEBUG nova.virt.hardware [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 951.732491] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 
274285e5-fc23-48b4-b0d6-5a67bc764d78] Reconfiguring VM to attach interface {{(pid=62204) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 951.733676] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcebb5e3-efe3-4f50-b985-547c915921f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.754848] env[62204]: DEBUG oslo_vmware.api [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 951.754848] env[62204]: value = "task-1200146" [ 951.754848] env[62204]: _type = "Task" [ 951.754848] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.764328] env[62204]: DEBUG oslo_vmware.api [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200146, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.778989] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200143, 'name': ReconfigVM_Task, 'duration_secs': 0.319951} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.779655] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfigured VM instance instance-0000005d to attach disk [datastore2] 21056adb-d81e-45bd-b354-1bcb488d2ed9/21056adb-d81e-45bd-b354-1bcb488d2ed9.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.780465] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0fa17e69-02b1-4d50-b51e-e46d206c72b1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.786318] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200144, 'name': CreateSnapshot_Task, 'duration_secs': 0.479981} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.786981] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 951.787798] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b84256-0d83-4b56-9d0d-90cb65335881 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.791855] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 951.791855] env[62204]: value = "task-1200147" [ 951.791855] env[62204]: _type = "Task" [ 951.791855] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.807477] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200147, 'name': Rename_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.911175] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.911778] env[62204]: DEBUG nova.compute.manager [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 951.916160] env[62204]: DEBUG oslo_concurrency.lockutils [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.012s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 951.916160] env[62204]: DEBUG nova.objects.instance [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lazy-loading 'resources' on Instance uuid 2b728904-19ef-4773-9260-c615da522801 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.982765] env[62204]: DEBUG oslo_vmware.api [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200145, 'name': ReconfigVM_Task, 'duration_secs': 0.165446} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.983109] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260108', 'volume_id': '6debc6c9-3775-46fa-b3ae-21b56913f95b', 'name': 'volume-6debc6c9-3775-46fa-b3ae-21b56913f95b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd97d792d-614f-42e3-8516-6c0a7cf15ad5', 'attached_at': '', 'detached_at': '', 'volume_id': '6debc6c9-3775-46fa-b3ae-21b56913f95b', 'serial': '6debc6c9-3775-46fa-b3ae-21b56913f95b'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 952.271028] env[62204]: DEBUG oslo_vmware.api [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200146, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.302308] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200147, 'name': Rename_Task, 'duration_secs': 0.228868} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.302619] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.302888] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-772195b7-0503-45c7-8283-398480dcffd6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.311011] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 952.311371] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ac0ba4ee-02c9-436c-ad57-3a1b95bc82ef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.319789] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 952.319789] env[62204]: value = "task-1200149" [ 952.319789] env[62204]: _type = "Task" [ 952.319789] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.321254] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 952.321254] env[62204]: value = "task-1200148" [ 952.321254] env[62204]: _type = "Task" [ 952.321254] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.332323] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200149, 'name': CloneVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.335630] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200148, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.419599] env[62204]: DEBUG nova.compute.utils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 952.424820] env[62204]: DEBUG nova.compute.manager [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 952.425131] env[62204]: DEBUG nova.network.neutron [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 952.555782] env[62204]: DEBUG nova.policy [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f57a0e000a79440489a0009f1b2390e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cc2d3674b2a4fa3806dc0286481368e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 952.766158] env[62204]: DEBUG oslo_vmware.api [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200146, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.768971] env[62204]: DEBUG nova.network.neutron [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updated VIF entry in instance network info cache for port 5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 952.769171] env[62204]: DEBUG nova.network.neutron [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38", "address": "fa:16:3e:e6:ae:0d", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c5577ec-23", "ovs_interfaceid": "5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.786510] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8babe759-8b88-4cac-bfa7-277dbe846699 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.795223] 
env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5aff36-2368-4aef-82b6-11598a4db46d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.835262] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438b0800-3196-4607-906c-6d850deed66d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.842831] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200148, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.847864] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200149, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.849485] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f0cd87-99b1-4f53-95fe-3f7253f8c7ec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.863364] env[62204]: DEBUG nova.compute.provider_tree [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.925661] env[62204]: DEBUG nova.compute.manager [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 952.964696] env[62204]: DEBUG nova.network.neutron [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Successfully updated port: ca6a6668-486c-47f3-bbb8-5902729c6304 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 953.032376] env[62204]: DEBUG nova.objects.instance [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lazy-loading 'flavor' on Instance uuid d97d792d-614f-42e3-8516-6c0a7cf15ad5 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.258173] env[62204]: DEBUG nova.network.neutron [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Successfully created port: a6d6e5fe-b756-45fa-9069-cec641c128e1 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.268562] env[62204]: DEBUG oslo_vmware.api [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200146, 'name': ReconfigVM_Task, 'duration_secs': 1.23894} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.269356] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.269696] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Reconfigured VM to attach interface {{(pid=62204) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 953.273736] env[62204]: DEBUG oslo_concurrency.lockutils [req-5b59020d-5496-4076-9adc-24d542607348 req-13343ace-5a38-4b7b-9ed0-4dc04d3e956b service nova] Releasing lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.341422] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200149, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.344880] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200148, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.368190] env[62204]: DEBUG nova.scheduler.client.report [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 953.469953] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-ddef8de2-530e-4b94-aff1-6f7e410f44fb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.470127] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-ddef8de2-530e-4b94-aff1-6f7e410f44fb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.470299] env[62204]: DEBUG nova.network.neutron [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 953.536735] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e5426ce0-4380-4602-8e6d-54c369cd14f1 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.397s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.780511] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a723d495-7243-4077-8b8b-4c93adea8d4a tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-274285e5-fc23-48b4-b0d6-5a67bc764d78-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.550s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.842055] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200149, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.844920] env[62204]: DEBUG oslo_vmware.api [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200148, 'name': PowerOnVM_Task, 'duration_secs': 1.12446} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.845183] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.845389] env[62204]: INFO nova.compute.manager [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Took 10.17 seconds to spawn the instance on the hypervisor. [ 953.845569] env[62204]: DEBUG nova.compute.manager [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 953.846321] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf22d70-6d9b-4a3e-8244-8a14c1eaf23d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.872847] env[62204]: DEBUG oslo_concurrency.lockutils [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.958s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 953.876802] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.415s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 953.878402] env[62204]: INFO nova.compute.claims [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 953.901463] env[62204]: INFO nova.scheduler.client.report [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Deleted allocations for instance 2b728904-19ef-4773-9260-c615da522801 [ 953.936880] env[62204]: DEBUG nova.compute.manager [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 953.960520] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 953.960806] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 953.960973] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 953.962014] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 953.962309] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 953.962461] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 953.962722] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 953.962907] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 953.963126] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 
tempest-ServersTestJSON-2134578796-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 953.963335] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 953.963542] env[62204]: DEBUG nova.virt.hardware [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 953.964440] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5508c138-8dc3-42fa-bc1b-66f9424ecf4b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.977361] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4de488-8b84-4df7-b9c1-99e3ff030118 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.022514] env[62204]: DEBUG nova.network.neutron [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 954.056730] env[62204]: DEBUG nova.compute.manager [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Received event network-vif-plugged-ca6a6668-486c-47f3-bbb8-5902729c6304 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 954.056984] env[62204]: DEBUG oslo_concurrency.lockutils [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] Acquiring lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.057364] env[62204]: DEBUG oslo_concurrency.lockutils [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] Lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.057716] env[62204]: DEBUG oslo_concurrency.lockutils [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] Lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.057992] env[62204]: DEBUG nova.compute.manager [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] No waiting events 
found dispatching network-vif-plugged-ca6a6668-486c-47f3-bbb8-5902729c6304 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 954.058290] env[62204]: WARNING nova.compute.manager [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Received unexpected event network-vif-plugged-ca6a6668-486c-47f3-bbb8-5902729c6304 for instance with vm_state building and task_state spawning. [ 954.058511] env[62204]: DEBUG nova.compute.manager [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Received event network-changed-ca6a6668-486c-47f3-bbb8-5902729c6304 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 954.058752] env[62204]: DEBUG nova.compute.manager [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Refreshing instance network info cache due to event network-changed-ca6a6668-486c-47f3-bbb8-5902729c6304. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 954.059011] env[62204]: DEBUG oslo_concurrency.lockutils [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] Acquiring lock "refresh_cache-ddef8de2-530e-4b94-aff1-6f7e410f44fb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.062055] env[62204]: INFO nova.compute.manager [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Rescuing [ 954.062351] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.062464] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.062657] env[62204]: DEBUG nova.network.neutron [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 954.202914] env[62204]: DEBUG nova.network.neutron [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Updating instance_info_cache with network_info: [{"id": "ca6a6668-486c-47f3-bbb8-5902729c6304", "address": "fa:16:3e:86:85:97", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca6a6668-48", "ovs_interfaceid": "ca6a6668-486c-47f3-bbb8-5902729c6304", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.343607] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200149, 'name': CloneVM_Task, 'duration_secs': 1.767973} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.343896] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Created linked-clone VM from snapshot [ 954.345059] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7699eb4-9808-4ab4-a2f3-6018b071375a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.353148] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Uploading image 8af26ec1-5c36-47eb-bae1-70ed8da45531 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 954.362775] env[62204]: INFO nova.compute.manager [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Took 24.85 seconds to build instance. [ 954.389730] env[62204]: DEBUG oslo_vmware.rw_handles [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 954.389730] env[62204]: value = "vm-260111" [ 954.389730] env[62204]: _type = "VirtualMachine" [ 954.389730] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 954.390254] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-bf25ebe3-6d6e-4c27-aa8c-872ff6e34125 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.398862] env[62204]: DEBUG oslo_vmware.rw_handles [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lease: (returnval){ [ 954.398862] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527b6c79-8ecb-d885-0b33-b5d1ecb94acc" [ 954.398862] env[62204]: _type = "HttpNfcLease" [ 954.398862] env[62204]: } obtained for exporting VM: (result){ [ 954.398862] env[62204]: value = "vm-260111" [ 954.398862] env[62204]: _type = "VirtualMachine" [ 954.398862] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 954.399115] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the lease: (returnval){ [ 954.399115] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527b6c79-8ecb-d885-0b33-b5d1ecb94acc" [ 954.399115] env[62204]: _type = "HttpNfcLease" [ 954.399115] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 954.408402] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 954.408402] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527b6c79-8ecb-d885-0b33-b5d1ecb94acc" [ 954.408402] env[62204]: _type = "HttpNfcLease" [ 954.408402] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 954.409507] env[62204]: DEBUG oslo_concurrency.lockutils [None req-90524f0b-5efb-4119-aeec-57be2d8de060 tempest-ServerRescueTestJSON-1234453528 tempest-ServerRescueTestJSON-1234453528-project-member] Lock "2b728904-19ef-4773-9260-c615da522801" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.466s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.672759] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.673145] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.707351] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-ddef8de2-530e-4b94-aff1-6f7e410f44fb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.707708] env[62204]: DEBUG nova.compute.manager [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Instance network_info: |[{"id": "ca6a6668-486c-47f3-bbb8-5902729c6304", "address": "fa:16:3e:86:85:97", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca6a6668-48", "ovs_interfaceid": "ca6a6668-486c-47f3-bbb8-5902729c6304", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 954.708238] env[62204]: DEBUG oslo_concurrency.lockutils [req-851fa076-9295-4954-ab5c-710e94a3659f 
req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] Acquired lock "refresh_cache-ddef8de2-530e-4b94-aff1-6f7e410f44fb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.708441] env[62204]: DEBUG nova.network.neutron [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Refreshing network info cache for port ca6a6668-486c-47f3-bbb8-5902729c6304 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 954.709585] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:85:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca6a6668-486c-47f3-bbb8-5902729c6304', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.720348] env[62204]: DEBUG oslo.service.loopingcall [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.723821] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.724317] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89c0c292-11e2-40c2-9412-3535a8b25a29 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.750983] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.750983] env[62204]: value = "task-1200151" [ 954.750983] env[62204]: _type = "Task" [ 954.750983] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.761114] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200151, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.848141] env[62204]: DEBUG nova.network.neutron [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updating instance_info_cache with network_info: [{"id": "7394819f-3d04-4685-a087-5a61976b658a", "address": "fa:16:3e:96:f4:0f", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7394819f-3d", "ovs_interfaceid": "7394819f-3d04-4685-a087-5a61976b658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.864550] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d70f5774-14c4-4bd2-9861-6469371fd20c tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.357s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.910469] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 954.910469] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527b6c79-8ecb-d885-0b33-b5d1ecb94acc" [ 954.910469] env[62204]: _type = "HttpNfcLease" [ 954.910469] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 954.910796] env[62204]: DEBUG oslo_vmware.rw_handles [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 954.910796] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527b6c79-8ecb-d885-0b33-b5d1ecb94acc" [ 954.910796] env[62204]: _type = "HttpNfcLease" [ 954.910796] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 954.911749] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484ff9da-92f9-4b8c-97fc-d4f117ce3cea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.925019] env[62204]: DEBUG oslo_vmware.rw_handles [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b04ee8-5c14-1edf-7f43-492b78dade95/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 954.925019] env[62204]: DEBUG oslo_vmware.rw_handles [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b04ee8-5c14-1edf-7f43-492b78dade95/disk-0.vmdk for reading. {{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 955.020205] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4e412d3b-1570-4e8e-99f3-b8a0bcb07201 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.175941] env[62204]: DEBUG nova.compute.manager [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 955.259879] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a6b5ca-a3ab-479e-823c-6f2efc017449 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.265644] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200151, 'name': CreateVM_Task, 'duration_secs': 0.31744} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.266198] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 955.266975] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.267163] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.267497] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 955.267780] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce5ff135-c2b6-40ab-bc88-740abd2a8130 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.272099] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb94482-463c-4dbf-bdbe-da8c036d9754 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.276571] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 955.276571] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528504b6-3302-02c6-6c59-6ad3f55424c8" [ 955.276571] env[62204]: _type = "Task" [ 955.276571] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.305442] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ef0000-0fe8-4d76-b44b-e5121c109a51 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.311837] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528504b6-3302-02c6-6c59-6ad3f55424c8, 'name': SearchDatastore_Task, 'duration_secs': 0.010396} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.312613] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.312811] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.313066] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.313219] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.313437] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.313720] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfdd2037-bb68-4cc3-a220-f5062981bae2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.320776] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd521e1-57f6-4f06-9e81-5bd6b0e924d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.326083] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.326239] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.327513] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc55c858-23ab-4656-8ea6-8ec22aea295d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.340280] env[62204]: DEBUG nova.compute.provider_tree [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.343461] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 955.343461] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521d03ec-01d6-0ef6-77e3-e205d7165236" [ 955.343461] env[62204]: _type = "Task" [ 955.343461] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.351653] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.353887] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521d03ec-01d6-0ef6-77e3-e205d7165236, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.700451] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.760335] env[62204]: DEBUG nova.network.neutron [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Updated VIF entry in instance network info cache for port ca6a6668-486c-47f3-bbb8-5902729c6304. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 955.760739] env[62204]: DEBUG nova.network.neutron [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Updating instance_info_cache with network_info: [{"id": "ca6a6668-486c-47f3-bbb8-5902729c6304", "address": "fa:16:3e:86:85:97", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca6a6668-48", "ovs_interfaceid": "ca6a6668-486c-47f3-bbb8-5902729c6304", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.843192] env[62204]: DEBUG nova.scheduler.client.report [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 955.858954] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521d03ec-01d6-0ef6-77e3-e205d7165236, 'name': SearchDatastore_Task, 'duration_secs': 0.008883} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.861849] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-204af477-9232-4373-beb1-4a55bb5fc992 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.868572] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 955.868572] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52674133-70a3-8ce2-4d60-987526740ae9" [ 955.868572] env[62204]: _type = "Task" [ 955.868572] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.877890] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52674133-70a3-8ce2-4d60-987526740ae9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.881733] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 955.882033] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36bf10d2-8db5-4614-b149-bd38529cdfaa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.889668] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 955.889668] env[62204]: value = "task-1200152" [ 955.889668] env[62204]: _type = "Task" [ 955.889668] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.899123] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200152, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.924860] env[62204]: DEBUG nova.network.neutron [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Successfully updated port: a6d6e5fe-b756-45fa-9069-cec641c128e1 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.997228] env[62204]: DEBUG nova.compute.manager [req-6c42fc7a-650c-4564-8656-46fde84a9217 req-cd2d270f-d5c0-45b4-ac04-4c81d8950e59 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Received event network-vif-plugged-a6d6e5fe-b756-45fa-9069-cec641c128e1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.998364] env[62204]: DEBUG oslo_concurrency.lockutils [req-6c42fc7a-650c-4564-8656-46fde84a9217 req-cd2d270f-d5c0-45b4-ac04-4c81d8950e59 service nova] Acquiring lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.998797] env[62204]: DEBUG oslo_concurrency.lockutils [req-6c42fc7a-650c-4564-8656-46fde84a9217 req-cd2d270f-d5c0-45b4-ac04-4c81d8950e59 service nova] Lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.999120] env[62204]: DEBUG oslo_concurrency.lockutils [req-6c42fc7a-650c-4564-8656-46fde84a9217 req-cd2d270f-d5c0-45b4-ac04-4c81d8950e59 service nova] Lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.999417] env[62204]: DEBUG nova.compute.manager [req-6c42fc7a-650c-4564-8656-46fde84a9217 req-cd2d270f-d5c0-45b4-ac04-4c81d8950e59 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] No waiting events found dispatching network-vif-plugged-a6d6e5fe-b756-45fa-9069-cec641c128e1 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 955.999698] env[62204]: WARNING nova.compute.manager [req-6c42fc7a-650c-4564-8656-46fde84a9217 req-cd2d270f-d5c0-45b4-ac04-4c81d8950e59 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Received unexpected event network-vif-plugged-a6d6e5fe-b756-45fa-9069-cec641c128e1 for instance with vm_state building and task_state spawning. 
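The records above exercise two library patterns over and over: oslo.concurrency's named locks (the "acquired by" / "released by" lines are emitted by the inner wrapper in lockutils.py) and oslo.vmware's task polling (wait_for_task / _poll_task driving PowerOnVM_Task, CloneVM_Task, CreateVM_Task and similar vCenter tasks). The sketch below is a minimal, hypothetical illustration of those two library APIs only; the vCenter host, credentials, lock name, and vm_ref are placeholders, and it is not Nova's actual compute-manager code path.

    # Minimal sketch of the two patterns visible in the log records above:
    # a lockutils named lock and oslo.vmware task polling. All connection
    # details and vm_ref are placeholders, not values from this log.
    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api

    def power_on(vm_ref):
        # Same session type the driver creates at startup; it logs in once
        # and re-authenticates on failure.
        session = vmware_api.VMwareAPISession(
            'vc.example.test',        # placeholder vCenter host
            'administrator',          # placeholder username
            'secret',                 # placeholder password
            api_retry_count=10,
            task_poll_interval=0.5)   # controls the _poll_task cadence

        # lockutils emits the "acquired by" / "released by" debug lines.
        with lockutils.lock('example-instance-uuid'):
            # invoke_api returns a Task moref; wait_for_task polls it until
            # it succeeds (the "PowerOnVM_Task progress is ..." lines).
            task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
            session.wait_for_task(task)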
[ 956.266161] env[62204]: DEBUG oslo_concurrency.lockutils [req-851fa076-9295-4954-ab5c-710e94a3659f req-9d7f9780-fe22-4f7a-8d09-f460982bfdc2 service nova] Releasing lock "refresh_cache-ddef8de2-530e-4b94-aff1-6f7e410f44fb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.355030] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.355030] env[62204]: DEBUG nova.compute.manager [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 956.356804] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.585s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.357208] env[62204]: DEBUG nova.objects.instance [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lazy-loading 'resources' on Instance uuid 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.381024] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52674133-70a3-8ce2-4d60-987526740ae9, 'name': SearchDatastore_Task, 'duration_secs': 0.015171} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.381024] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.381024] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb/ddef8de2-530e-4b94-aff1-6f7e410f44fb.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.381024] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f415911-a890-480c-9726-65ded8aaa16b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.389020] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 956.389020] env[62204]: value = "task-1200153" [ 956.389020] env[62204]: _type = "Task" [ 956.389020] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.401669] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200153, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.405715] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200152, 'name': PowerOffVM_Task, 'duration_secs': 0.274432} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.406217] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 956.407172] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cebdbe-2fdd-47ce-9024-545c63f184de {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.431196] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "refresh_cache-0a383305-5b3b-4a7d-8834-d31e54eb4ba5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.431902] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "refresh_cache-0a383305-5b3b-4a7d-8834-d31e54eb4ba5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.432196] env[62204]: DEBUG nova.network.neutron [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 956.435879] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7456bd9c-1db0-4e08-8201-d1f34215fd5c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.478854] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.479765] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66d19bb4-5907-488f-8423-915139264fb0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.488132] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 956.488132] env[62204]: value = "task-1200154" [ 956.488132] env[62204]: _type = "Task" [ 956.488132] env[62204]: } to complete. 
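Editor's note: many of the records here are oslo.vmware task polls: a vCenter task (CopyVirtualDisk_Task, PowerOffVM_Task, and so on) is started and its progress is re-read until it completes, which is where the repeated "Task: {...} progress is N%" lines come from. The sketch below shows the general poll-until-done pattern with oslo.service's FixedIntervalLoopingCall; fetch_task_info is a hypothetical callable standing in for the real vCenter property read, so this is a sketch of the pattern, not the oslo.vmware implementation itself.

    # Minimal sketch of the poll loop behind the "progress is N%" lines above.
    # fetch_task_info() is a hypothetical callable returning an object with
    # .state and .progress; the real code reads these from vCenter.
    from oslo_service import loopingcall


    def wait_for_task(fetch_task_info, poll_interval=0.5):
        def _poll():
            info = fetch_task_info()
            if info.state == 'success':
                # Stop the loop and hand the result back to start().wait().
                raise loopingcall.LoopingCallDone(info)
            if info.state == 'error':
                raise RuntimeError('task entered error state')
            print('progress is %s%%' % info.progress)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=poll_interval).wait()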
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.497820] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 956.498356] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.498926] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.499241] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.499557] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.499960] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf8129b9-5c8b-405b-8cef-b4c11c36b779 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.509077] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.509561] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.510407] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02d22ff9-24c2-4388-b05c-49946e876229 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.515797] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 956.515797] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5266f1a8-e0a2-01eb-dac7-5831680a0104" [ 956.515797] env[62204]: _type = "Task" [ 956.515797] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.524504] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5266f1a8-e0a2-01eb-dac7-5831680a0104, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.769576] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "interface-274285e5-fc23-48b4-b0d6-5a67bc764d78-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.769576] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-274285e5-fc23-48b4-b0d6-5a67bc764d78-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.862455] env[62204]: DEBUG nova.compute.utils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 956.870115] env[62204]: DEBUG nova.compute.manager [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 956.870340] env[62204]: DEBUG nova.network.neutron [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 956.873484] env[62204]: DEBUG nova.compute.manager [req-56349194-c31e-4ba0-829b-1982fe16776b req-dc53e4b0-a4a7-4097-a853-231e1bcad510 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Received event network-changed-3874d0d3-36f6-4cab-a204-a05bf0fb54ac {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 956.873867] env[62204]: DEBUG nova.compute.manager [req-56349194-c31e-4ba0-829b-1982fe16776b req-dc53e4b0-a4a7-4097-a853-231e1bcad510 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Refreshing instance network info cache due to event network-changed-3874d0d3-36f6-4cab-a204-a05bf0fb54ac. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 956.874291] env[62204]: DEBUG oslo_concurrency.lockutils [req-56349194-c31e-4ba0-829b-1982fe16776b req-dc53e4b0-a4a7-4097-a853-231e1bcad510 service nova] Acquiring lock "refresh_cache-21056adb-d81e-45bd-b354-1bcb488d2ed9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.874360] env[62204]: DEBUG oslo_concurrency.lockutils [req-56349194-c31e-4ba0-829b-1982fe16776b req-dc53e4b0-a4a7-4097-a853-231e1bcad510 service nova] Acquired lock "refresh_cache-21056adb-d81e-45bd-b354-1bcb488d2ed9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.875015] env[62204]: DEBUG nova.network.neutron [req-56349194-c31e-4ba0-829b-1982fe16776b req-dc53e4b0-a4a7-4097-a853-231e1bcad510 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Refreshing network info cache for port 3874d0d3-36f6-4cab-a204-a05bf0fb54ac {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 956.900184] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200153, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.930280] env[62204]: DEBUG nova.policy [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52fc19cbbaf14319a258f952c739c137', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd93f6aa3eaad4c5b91b657e75854f45f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 957.020116] env[62204]: DEBUG nova.network.neutron [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 957.036873] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5266f1a8-e0a2-01eb-dac7-5831680a0104, 'name': SearchDatastore_Task, 'duration_secs': 0.020154} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.037826] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4508011-b66c-4342-a177-19caaa624deb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.046817] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 957.046817] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525a60d6-08a7-7ee7-70a9-cc10557ac97f" [ 957.046817] env[62204]: _type = "Task" [ 957.046817] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.056485] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525a60d6-08a7-7ee7-70a9-cc10557ac97f, 'name': SearchDatastore_Task} progress is 0%. 
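Editor's note: the "Policy check for network:attach_external_network failed" record above is an oslo.policy authorization: the caller's token (roles reader/member, non-admin) does not satisfy the rule, so the attach is treated as a normal tenant network attach rather than an external one. A hedged sketch of that kind of check is below; the 'role:admin' rule string is an assumption made for the example, not Nova's shipped default policy.

    # Illustrative oslo.policy check, loosely mirroring the failed
    # "network:attach_external_network" authorization above. The rule string
    # registered here is an assumption for the sketch, not Nova's policy file.
    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    enforcer = policy.Enforcer(CONF)
    enforcer.register_defaults([
        policy.RuleDefault('network:attach_external_network',
                           'role:admin'),   # assumed default for the sketch
    ])

    creds = {'roles': ['reader', 'member'],
             'project_id': 'd93f6aa3eaad4c5b91b657e75854f45f'}
    target = {'project_id': creds['project_id']}

    # do_raise=False returns a boolean instead of raising PolicyNotAuthorized,
    # which is what lets the caller log "Policy check ... failed" and continue.
    allowed = enforcer.enforce('network:attach_external_network', target, creds,
                               do_raise=False)
    print(allowed)  # False for a reader/member token under the assumed rule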
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.259560] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ea3a32-9257-446b-920a-dfac9db96d05 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.271417] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30dfa9f-6a80-491b-9177-f29fc55f13af {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.275262] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.275605] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.279019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764dd90e-352b-4fdb-99b1-b47d4202649d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.323018] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8616be3e-dfbb-4072-935a-76d3e52894f3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.323993] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2fa097-bed5-4f5a-9820-364b822f3c69 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.350989] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f5946b-a253-4829-8a33-0e0cd8ba8e35 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.360796] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Reconfiguring VM to detach interface {{(pid=62204) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 957.361199] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-132ca78e-f90b-4631-b380-d9f121a2340c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.375869] env[62204]: DEBUG nova.compute.manager [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 957.389984] env[62204]: DEBUG nova.compute.provider_tree [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.396798] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 957.396798] env[62204]: value = "task-1200155" [ 957.396798] env[62204]: _type = "Task" [ 957.396798] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.407691] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.412063] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200153, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577122} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.412354] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb/ddef8de2-530e-4b94-aff1-6f7e410f44fb.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.412571] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.412834] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5e9d06b-bd3c-450e-b479-d499c4e51aa6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.422180] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 957.422180] env[62204]: value = "task-1200156" [ 957.422180] env[62204]: _type = "Task" [ 957.422180] env[62204]: } to complete. 
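Editor's note: the resource-tracker record above ("Inventory has not changed in ProviderTree for provider 92e8f362-...") refers to the placement inventory that is printed in full a little further down (VCPU, MEMORY_MB and DISK_GB with reserved amounts and allocation ratios). Placement derives schedulable capacity as (total - reserved) * allocation_ratio; the snippet below simply replays that arithmetic on the values reported in this log.

    # Replaying placement's capacity arithmetic on the inventory reported for
    # provider 92e8f362-5134-40c6-9a5c-0b8f64197972 later in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: schedulable capacity %.0f' % (rc, capacity))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400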
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.434787] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200156, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.490579] env[62204]: DEBUG nova.network.neutron [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Updating instance_info_cache with network_info: [{"id": "a6d6e5fe-b756-45fa-9069-cec641c128e1", "address": "fa:16:3e:b3:33:d0", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6d6e5fe-b7", "ovs_interfaceid": "a6d6e5fe-b756-45fa-9069-cec641c128e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.560169] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525a60d6-08a7-7ee7-70a9-cc10557ac97f, 'name': SearchDatastore_Task, 'duration_secs': 0.012231} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.560492] env[62204]: DEBUG oslo_concurrency.lockutils [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.560785] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d97d792d-614f-42e3-8516-6c0a7cf15ad5/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. 
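Editor's note: the disk copies above follow a fixed datastore layout: cached base images live under devstack-image-cache_base/<image-id>/<image-id>.vmdk, instance root disks under <instance-uuid>/<instance-uuid>.vmdk, and rescue disks get a "-rescue" suffix inside the instance folder. The helper below is a purely illustrative way to rebuild those "[datastore] path" strings; it is not Nova's ds_util code.

    # Purely illustrative helpers that rebuild the "[datastore] path" strings
    # seen in the copy operations above; not Nova's ds_util implementation.
    def ds_path(datastore, *parts):
        return '[%s] %s' % (datastore, '/'.join(parts))

    def cached_image_path(datastore, image_id):
        # e.g. [datastore2] devstack-image-cache_base/<image>/<image>.vmdk
        return ds_path(datastore, 'devstack-image-cache_base', image_id,
                       image_id + '.vmdk')

    def instance_disk_path(datastore, instance_uuid):
        # e.g. [datastore2] <uuid>/<uuid>.vmdk
        return ds_path(datastore, instance_uuid, instance_uuid + '.vmdk')

    def rescue_disk_path(datastore, instance_uuid, image_id):
        # e.g. [datastore1] <uuid>/<image>-rescue.vmdk
        return ds_path(datastore, instance_uuid, image_id + '-rescue.vmdk')

    print(cached_image_path('datastore2', 'c0e4d3a1-f965-49e2-ab05-fbf425872dcc'))
    print(rescue_disk_path('datastore1', 'd97d792d-614f-42e3-8516-6c0a7cf15ad5',
                           'c0e4d3a1-f965-49e2-ab05-fbf425872dcc'))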
{{(pid=62204) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 957.561370] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f74ddfe5-2cc9-4a76-b9f9-37fac69cc23e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.569126] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 957.569126] env[62204]: value = "task-1200157" [ 957.569126] env[62204]: _type = "Task" [ 957.569126] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.579952] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200157, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.685773] env[62204]: DEBUG nova.network.neutron [req-56349194-c31e-4ba0-829b-1982fe16776b req-dc53e4b0-a4a7-4097-a853-231e1bcad510 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Updated VIF entry in instance network info cache for port 3874d0d3-36f6-4cab-a204-a05bf0fb54ac. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 957.686187] env[62204]: DEBUG nova.network.neutron [req-56349194-c31e-4ba0-829b-1982fe16776b req-dc53e4b0-a4a7-4097-a853-231e1bcad510 service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Updating instance_info_cache with network_info: [{"id": "3874d0d3-36f6-4cab-a204-a05bf0fb54ac", "address": "fa:16:3e:0e:ce:ee", "network": {"id": "2aef21dd-040b-4126-a4d0-e8adef0348c9", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1172091454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56643ee7896c48bf9be3dd1cb1c9fc80", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "415e68b4-3766-4359-afe2-f8563910d98c", "external-id": "nsx-vlan-transportzone-538", "segmentation_id": 538, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3874d0d3-36", "ovs_interfaceid": "3874d0d3-36f6-4cab-a204-a05bf0fb54ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.704512] env[62204]: DEBUG nova.network.neutron [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Successfully created port: a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee {{(pid=62204) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 957.904250] env[62204]: DEBUG nova.scheduler.client.report [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 957.924284] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.938498] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200156, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116118} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.938498] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.938889] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085eb5de-bfbd-470d-ad9d-4e5f8e9de4a3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.968991] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb/ddef8de2-530e-4b94-aff1-6f7e410f44fb.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.969960] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c249cf13-1c13-488f-b627-25c651b9238f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.993452] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "refresh_cache-0a383305-5b3b-4a7d-8834-d31e54eb4ba5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.993842] env[62204]: DEBUG nova.compute.manager [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 
0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Instance network_info: |[{"id": "a6d6e5fe-b756-45fa-9069-cec641c128e1", "address": "fa:16:3e:b3:33:d0", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6d6e5fe-b7", "ovs_interfaceid": "a6d6e5fe-b756-45fa-9069-cec641c128e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 957.996077] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:33:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6d6e5fe-b756-45fa-9069-cec641c128e1', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.007459] env[62204]: DEBUG oslo.service.loopingcall [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.008338] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 958.008338] env[62204]: value = "task-1200158" [ 958.008338] env[62204]: _type = "Task" [ 958.008338] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.008573] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.008863] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23b26fc4-5b17-4ec5-af2e-bce05a05269d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.036647] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200158, 'name': ReconfigVM_Task} progress is 14%. 
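Editor's note: the two records just above show the hand-off from Neutron's network_info for port a6d6e5fe-... (MAC fa:16:3e:b3:33:d0, NSX logical switch 13b62154-...) to the VMware "Instance VIF info" used to build the VM (an OpaqueNetwork reference plus a vmxnet3 NIC model). The snippet below extracts the same fields from a network_info entry shaped like the one printed above; it is a reading aid, not the driver's VIF translation code.

    # Reading aid: pull the fields that end up in the "Instance VIF info"
    # record out of a Neutron network_info entry shaped like the one above.
    vif = {
        'id': 'a6d6e5fe-b756-45fa-9069-cec641c128e1',
        'address': 'fa:16:3e:b3:33:d0',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id':
                    '13b62154-a0e1-4eed-bc30-6464b15993bb'},
        'type': 'ovs',
    }

    vif_info = {
        'network_name': vif['network']['bridge'],
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': vif['details']['nsx-logical-switch-id'],
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': 'vmxnet3',
    }
    print(vif_info)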
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.040348] env[62204]: DEBUG nova.compute.manager [req-ce278d3a-a468-40c7-9338-159cbaa5a0a6 req-ebb64bd5-10a4-42e7-86a9-a9acbd737fd5 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Received event network-changed-a6d6e5fe-b756-45fa-9069-cec641c128e1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 958.040646] env[62204]: DEBUG nova.compute.manager [req-ce278d3a-a468-40c7-9338-159cbaa5a0a6 req-ebb64bd5-10a4-42e7-86a9-a9acbd737fd5 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Refreshing instance network info cache due to event network-changed-a6d6e5fe-b756-45fa-9069-cec641c128e1. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 958.040853] env[62204]: DEBUG oslo_concurrency.lockutils [req-ce278d3a-a468-40c7-9338-159cbaa5a0a6 req-ebb64bd5-10a4-42e7-86a9-a9acbd737fd5 service nova] Acquiring lock "refresh_cache-0a383305-5b3b-4a7d-8834-d31e54eb4ba5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.041035] env[62204]: DEBUG oslo_concurrency.lockutils [req-ce278d3a-a468-40c7-9338-159cbaa5a0a6 req-ebb64bd5-10a4-42e7-86a9-a9acbd737fd5 service nova] Acquired lock "refresh_cache-0a383305-5b3b-4a7d-8834-d31e54eb4ba5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.041253] env[62204]: DEBUG nova.network.neutron [req-ce278d3a-a468-40c7-9338-159cbaa5a0a6 req-ebb64bd5-10a4-42e7-86a9-a9acbd737fd5 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Refreshing network info cache for port a6d6e5fe-b756-45fa-9069-cec641c128e1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 958.042796] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.042796] env[62204]: value = "task-1200159" [ 958.042796] env[62204]: _type = "Task" [ 958.042796] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.055970] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200159, 'name': CreateVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.080647] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200157, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.090023] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "2727dc46-98ed-435d-89ef-41bc20cda776" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.090023] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.189675] env[62204]: DEBUG oslo_concurrency.lockutils [req-56349194-c31e-4ba0-829b-1982fe16776b req-dc53e4b0-a4a7-4097-a853-231e1bcad510 service nova] Releasing lock "refresh_cache-21056adb-d81e-45bd-b354-1bcb488d2ed9" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.412640] env[62204]: DEBUG nova.compute.manager [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 958.415412] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.059s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.421309] env[62204]: DEBUG oslo_concurrency.lockutils [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.860s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.421572] env[62204]: DEBUG nova.objects.instance [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lazy-loading 'resources' on Instance uuid c0990e53-70c9-4536-b26a-bc00bd457c56 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.430047] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.444503] env[62204]: INFO nova.scheduler.client.report [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleted allocations for instance 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a [ 958.468886] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 958.469263] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 958.469511] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.469795] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 958.470188] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.473090] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 958.473090] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 958.473090] env[62204]: DEBUG nova.virt.hardware [None 
req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 958.473090] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 958.473090] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 958.473090] env[62204]: DEBUG nova.virt.hardware [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 958.474178] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004e047e-4967-4c74-aeb7-ec38806d93ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.482993] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e9a9ba-7440-4bc0-b3ae-c31ce8de997d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.517903] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200158, 'name': ReconfigVM_Task, 'duration_secs': 0.487176} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.518216] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Reconfigured VM instance instance-0000005e to attach disk [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb/ddef8de2-530e-4b94-aff1-6f7e410f44fb.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.518865] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a3639e6-4cae-4648-a573-018cfa2fb40c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.525476] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 958.525476] env[62204]: value = "task-1200160" [ 958.525476] env[62204]: _type = "Task" [ 958.525476] env[62204]: } to complete. 
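Editor's note: the nova.virt.hardware records above walk through CPU topology selection for the m1.nano flavor (1 vCPU, no flavor or image limits, so the effective maxima fall back to 65536): enumerate topologies whose sockets * cores * threads matches the vCPU count, filter by the maxima, and sort by preference, which for a single vCPU leaves only 1:1:1. The function below is a deliberately simplified illustration of that enumeration, not Nova's actual _get_possible_cpu_topologies.

    # Simplified illustration of the topology enumeration logged above; Nova's
    # real _get_possible_cpu_topologies also handles preferences, NUMA and
    # threading constraints.
    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets, cores, threads in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)], matching "Got 1 possible topologies"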
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.533912] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200160, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.554368] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200159, 'name': CreateVM_Task, 'duration_secs': 0.398477} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.554533] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.555215] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.555382] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.555703] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 958.556046] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-125a7d2e-ca23-4b7b-aa78-54421258bc25 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.560609] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 958.560609] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52033e17-36cd-ae4c-6784-606b7e80d94b" [ 958.560609] env[62204]: _type = "Task" [ 958.560609] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.568546] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52033e17-36cd-ae4c-6784-606b7e80d94b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.577644] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200157, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592414} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.577894] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d97d792d-614f-42e3-8516-6c0a7cf15ad5/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk. [ 958.578668] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df962a48-6505-4253-a4ff-8ab01a262c3d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.601372] env[62204]: INFO nova.compute.manager [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Detaching volume cd1bf708-e52b-48aa-ab88-cfd254e6c272 [ 958.610333] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] d97d792d-614f-42e3-8516-6c0a7cf15ad5/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.610985] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21b53897-7b66-4c63-879c-1c45761a4748 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.633065] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 958.633065] env[62204]: value = "task-1200161" [ 958.633065] env[62204]: _type = "Task" [ 958.633065] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.641199] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200161, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.664655] env[62204]: INFO nova.virt.block_device [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Attempting to driver detach volume cd1bf708-e52b-48aa-ab88-cfd254e6c272 from mountpoint /dev/sdb [ 958.665057] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Volume detach. Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 958.665285] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260068', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'name': 'volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2727dc46-98ed-435d-89ef-41bc20cda776', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'serial': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 958.666195] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2235601e-1e1e-494c-8ce6-4e517eda37d1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.691817] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144da8ac-a4dd-45d5-b55f-12948dbd253b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.699332] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849a3275-61c7-477d-b9a6-af3b62f371cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.722103] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7eb16e-a197-492b-8d4d-c1f479fa9a62 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.739803] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] The volume has not been displaced from its original location: [datastore2] volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272/volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272.vmdk. No consolidation needed. 
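Editor's note: the "Attempting to driver detach volume" / "_detach_volume_vmdk" records above carry the Cinder connection_info for the attached vmdk volume: the managed object reference of the volume's backing VM ('vm-260068'), the volume UUID, the 'volume-<uuid>' name, and the access mode. The snippet below just unpacks a dict shaped like the one in the log as a reading aid for those fields; it performs no detach itself.

    # Reading aid: the connection_info payload logged above for the vmdk
    # volume being detached, and the fields the detach path cares about.
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-260068',   # moref of the volume's backing VM
            'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272',
            'name': 'volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272',
            'access_mode': 'rw',
            'encrypted': False,
        },
        'serial': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272',
    }

    data = connection_info['data']
    print('detach %s (backing %s, mode %s)'
          % (data['volume_id'], data['volume'], data['access_mode']))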
{{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 958.745253] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Reconfiguring VM instance instance-00000034 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 958.745707] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc2c66a6-e220-4b3a-8b04-0788b87c9e4e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.764099] env[62204]: DEBUG oslo_vmware.api [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 958.764099] env[62204]: value = "task-1200162" [ 958.764099] env[62204]: _type = "Task" [ 958.764099] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.774983] env[62204]: DEBUG oslo_vmware.api [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200162, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.804818] env[62204]: DEBUG nova.network.neutron [req-ce278d3a-a468-40c7-9338-159cbaa5a0a6 req-ebb64bd5-10a4-42e7-86a9-a9acbd737fd5 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Updated VIF entry in instance network info cache for port a6d6e5fe-b756-45fa-9069-cec641c128e1. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 958.805348] env[62204]: DEBUG nova.network.neutron [req-ce278d3a-a468-40c7-9338-159cbaa5a0a6 req-ebb64bd5-10a4-42e7-86a9-a9acbd737fd5 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Updating instance_info_cache with network_info: [{"id": "a6d6e5fe-b756-45fa-9069-cec641c128e1", "address": "fa:16:3e:b3:33:d0", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6d6e5fe-b7", "ovs_interfaceid": "a6d6e5fe-b756-45fa-9069-cec641c128e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.925828] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.932249] env[62204]: DEBUG nova.objects.instance [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lazy-loading 'numa_topology' on Instance uuid c0990e53-70c9-4536-b26a-bc00bd457c56 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.954786] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d4232f87-fa32-4f1f-a3ea-ae3975974fbc tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "4fd0c913-8344-4fb9-91ad-f8ab64c6e89a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.255s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.042169] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200160, 'name': Rename_Task, 'duration_secs': 0.16677} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.042573] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.042948] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-600e28b6-bd01-4591-89e4-e3db7def532b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.050499] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 959.050499] env[62204]: value = "task-1200163" [ 959.050499] env[62204]: _type = "Task" [ 959.050499] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.060647] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200163, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.070694] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52033e17-36cd-ae4c-6784-606b7e80d94b, 'name': SearchDatastore_Task, 'duration_secs': 0.010978} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.071157] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.071540] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.071808] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.071961] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.072168] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 959.072441] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7c31b75-77cb-4716-a439-59eb46ee724e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.089999] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 959.090326] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 959.091011] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fecb77dc-1b37-45d7-8714-ca5e91f5dcde {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.097362] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 959.097362] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f3eda2-dff3-8dd2-ee56-c0798f0bd366" [ 959.097362] env[62204]: _type = "Task" [ 959.097362] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.106534] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f3eda2-dff3-8dd2-ee56-c0798f0bd366, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.143678] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.279461] env[62204]: DEBUG oslo_vmware.api [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200162, 'name': ReconfigVM_Task, 'duration_secs': 0.222501} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.279873] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Reconfigured VM instance instance-00000034 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 959.285252] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd993fdd-73cf-4359-8461-ee1a4263efa1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.305951] env[62204]: DEBUG oslo_vmware.api [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 959.305951] env[62204]: value = "task-1200164" [ 959.305951] env[62204]: _type = "Task" [ 959.305951] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.309427] env[62204]: DEBUG oslo_concurrency.lockutils [req-ce278d3a-a468-40c7-9338-159cbaa5a0a6 req-ebb64bd5-10a4-42e7-86a9-a9acbd737fd5 service nova] Releasing lock "refresh_cache-0a383305-5b3b-4a7d-8834-d31e54eb4ba5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.315181] env[62204]: DEBUG oslo_vmware.api [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200164, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.402015] env[62204]: DEBUG oslo_concurrency.lockutils [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.402420] env[62204]: DEBUG oslo_concurrency.lockutils [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.402548] env[62204]: DEBUG oslo_concurrency.lockutils [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.402775] env[62204]: DEBUG oslo_concurrency.lockutils [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.403043] env[62204]: DEBUG oslo_concurrency.lockutils [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.405617] env[62204]: INFO nova.compute.manager [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Terminating instance [ 959.410688] env[62204]: DEBUG nova.compute.manager [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 959.411074] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.411998] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bcd475-bd46-4685-81c9-21bb79efed6c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.426012] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.430231] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdddcbd3-6fad-4c59-89e7-c33a7c06a723 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.432131] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.435255] env[62204]: DEBUG nova.objects.base [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 959.438968] env[62204]: DEBUG oslo_vmware.api [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 959.438968] env[62204]: value = "task-1200165" [ 959.438968] env[62204]: _type = "Task" [ 959.438968] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.451026] env[62204]: DEBUG oslo_vmware.api [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.561661] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200163, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.615833] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f3eda2-dff3-8dd2-ee56-c0798f0bd366, 'name': SearchDatastore_Task, 'duration_secs': 0.021675} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.616861] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe64a240-7aff-40a4-819d-e3948f0a1cc9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.622858] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 959.622858] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a00e9f-540d-b4bb-9367-fbb2a9e69bac" [ 959.622858] env[62204]: _type = "Task" [ 959.622858] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.633659] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a00e9f-540d-b4bb-9367-fbb2a9e69bac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.651267] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200161, 'name': ReconfigVM_Task, 'duration_secs': 0.674074} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.651267] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfigured VM instance instance-00000055 to attach disk [datastore1] d97d792d-614f-42e3-8516-6c0a7cf15ad5/c0e4d3a1-f965-49e2-ab05-fbf425872dcc-rescue.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 959.652802] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73c1331-9328-4480-999c-82303969ca36 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.690150] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18a07af8-9500-4b1c-9e21-a63b17299375 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.702620] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 959.702620] env[62204]: value = "task-1200166" [ 959.702620] env[62204]: _type = "Task" [ 959.702620] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.712689] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200166, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.735218] env[62204]: DEBUG nova.network.neutron [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Successfully updated port: a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 959.818825] env[62204]: DEBUG oslo_vmware.api [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200164, 'name': ReconfigVM_Task, 'duration_secs': 0.135656} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.819172] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260068', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'name': 'volume-cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '2727dc46-98ed-435d-89ef-41bc20cda776', 'attached_at': '', 'detached_at': '', 'volume_id': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272', 'serial': 'cd1bf708-e52b-48aa-ab88-cfd254e6c272'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 959.828953] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec32438-ea25-4a54-b4f1-3e5bfea14657 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.836088] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747166ac-1c18-41bd-8563-f772098bdfcc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.870559] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0129c4-ea69-41b4-94dd-c51e00186f0b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.878897] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd02e69-e81d-478e-8e90-2d12bd45d2e6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.893447] env[62204]: DEBUG nova.compute.provider_tree [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.925904] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.948291] env[62204]: DEBUG oslo_vmware.api [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200165, 'name': PowerOffVM_Task, 'duration_secs': 0.261087} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.948596] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 959.948769] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 959.949089] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9bd5412c-1ea1-439b-a491-8b88c1e440b8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.021432] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.021660] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.021887] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleting the datastore file [datastore2] bd0f87d1-e53a-4433-afc6-6aea7e68d6f3 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.022158] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3356d94-6b0d-4074-b591-ce5672efca4b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.028649] env[62204]: DEBUG oslo_vmware.api [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 960.028649] env[62204]: value = "task-1200168" [ 960.028649] env[62204]: _type = "Task" [ 960.028649] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.038500] env[62204]: DEBUG oslo_vmware.api [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200168, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.061304] env[62204]: DEBUG oslo_vmware.api [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200163, 'name': PowerOnVM_Task, 'duration_secs': 0.54298} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.061578] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 960.061800] env[62204]: INFO nova.compute.manager [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Took 8.66 seconds to spawn the instance on the hypervisor. [ 960.061986] env[62204]: DEBUG nova.compute.manager [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 960.062769] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e492db-8161-451b-97b7-3750fca65a58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.068028] env[62204]: DEBUG nova.compute.manager [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Received event network-vif-plugged-a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.068259] env[62204]: DEBUG oslo_concurrency.lockutils [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] Acquiring lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.068468] env[62204]: DEBUG oslo_concurrency.lockutils [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.068644] env[62204]: DEBUG oslo_concurrency.lockutils [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.068855] env[62204]: DEBUG nova.compute.manager [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] No waiting 
events found dispatching network-vif-plugged-a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 960.069109] env[62204]: WARNING nova.compute.manager [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Received unexpected event network-vif-plugged-a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee for instance with vm_state building and task_state spawning. [ 960.069333] env[62204]: DEBUG nova.compute.manager [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Received event network-changed-a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 960.069519] env[62204]: DEBUG nova.compute.manager [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Refreshing instance network info cache due to event network-changed-a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 960.069741] env[62204]: DEBUG oslo_concurrency.lockutils [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] Acquiring lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.069940] env[62204]: DEBUG oslo_concurrency.lockutils [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] Acquired lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.070177] env[62204]: DEBUG nova.network.neutron [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Refreshing network info cache for port a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 960.084254] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.084254] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.134426] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a00e9f-540d-b4bb-9367-fbb2a9e69bac, 'name': SearchDatastore_Task, 'duration_secs': 0.025261} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.134776] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.135053] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 0a383305-5b3b-4a7d-8834-d31e54eb4ba5/0a383305-5b3b-4a7d-8834-d31e54eb4ba5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.135327] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6edeca07-8278-4db4-bc04-371606028f1b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.141463] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 960.141463] env[62204]: value = "task-1200169" [ 960.141463] env[62204]: _type = "Task" [ 960.141463] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.149882] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200169, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.211832] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200166, 'name': ReconfigVM_Task, 'duration_secs': 0.360587} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.212164] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.212461] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab4cabe5-2c0d-40c1-a63b-2572a5a3ec80 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.218256] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 960.218256] env[62204]: value = "task-1200170" [ 960.218256] env[62204]: _type = "Task" [ 960.218256] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.226553] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200170, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.236332] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.379085] env[62204]: DEBUG nova.objects.instance [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'flavor' on Instance uuid 2727dc46-98ed-435d-89ef-41bc20cda776 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.396436] env[62204]: DEBUG nova.scheduler.client.report [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 960.427846] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.538835] env[62204]: DEBUG oslo_vmware.api [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200168, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.460985} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.539205] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 960.539448] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 960.539663] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 960.539885] env[62204]: INFO nova.compute.manager [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 960.540201] env[62204]: DEBUG oslo.service.loopingcall [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 960.540444] env[62204]: DEBUG nova.compute.manager [-] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 960.540553] env[62204]: DEBUG nova.network.neutron [-] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 960.586190] env[62204]: DEBUG nova.compute.manager [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 960.591385] env[62204]: INFO nova.compute.manager [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Took 21.12 seconds to build instance. [ 960.610631] env[62204]: DEBUG nova.network.neutron [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 960.661581] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200169, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.729971] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200170, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.901937] env[62204]: DEBUG oslo_concurrency.lockutils [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.480s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.904141] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.135s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.905882] env[62204]: INFO nova.compute.claims [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.909832] env[62204]: DEBUG nova.network.neutron [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.927437] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.094355] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fb375551-7868-491f-b434-e9f372709073 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.627s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.111475] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.153408] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200169, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.800246} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.153491] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 0a383305-5b3b-4a7d-8834-d31e54eb4ba5/0a383305-5b3b-4a7d-8834-d31e54eb4ba5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 961.153738] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.154052] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14aee773-e9fe-493b-b1bc-ba95f2f1def0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.162151] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 961.162151] env[62204]: value = "task-1200171" [ 961.162151] env[62204]: _type = "Task" [ 961.162151] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.172203] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200171, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.228541] env[62204]: DEBUG oslo_vmware.api [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200170, 'name': PowerOnVM_Task, 'duration_secs': 0.745657} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.228898] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.232490] env[62204]: DEBUG nova.compute.manager [None req-ad265e69-3b92-4ef9-b656-2c74a7662d4e tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 961.233354] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44369453-e72f-4613-a129-a6ce2d689637 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.387326] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6d6879c7-4282-4780-bb57-cdf360bc7450 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.298s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.416145] env[62204]: DEBUG oslo_concurrency.lockutils [req-ab867f90-d48c-41d7-948c-29a1704834e7 req-9c727046-2ddd-47e8-804b-9522e1111e9c service nova] Releasing lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.416145] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.416145] env[62204]: DEBUG nova.network.neutron [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 961.416431] env[62204]: DEBUG oslo_concurrency.lockutils [None req-45b7e589-b414-4302-8f99-0044f2a51174 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 34.592s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.417276] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 9.790s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.417779] env[62204]: INFO nova.compute.manager [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Unshelving [ 961.429042] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.672601] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200171, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07149} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.672601] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.673280] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d37f3bd-9ffe-4f80-853e-74c8d394f15e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.698659] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 0a383305-5b3b-4a7d-8834-d31e54eb4ba5/0a383305-5b3b-4a7d-8834-d31e54eb4ba5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.699627] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cad7da5-b75d-4744-b109-c609c2a4b3fa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.725387] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 961.725387] env[62204]: value = "task-1200172" [ 961.725387] env[62204]: _type = "Task" [ 961.725387] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.736146] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200172, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.762576] env[62204]: DEBUG nova.network.neutron [-] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.938725] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.967489] env[62204]: DEBUG nova.network.neutron [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 962.027039] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "2727dc46-98ed-435d-89ef-41bc20cda776" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.027324] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.027955] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.027955] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.027955] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.030579] env[62204]: INFO nova.compute.manager [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 
tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Terminating instance [ 962.032526] env[62204]: DEBUG nova.compute.manager [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 962.032711] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.033545] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d0649a-b75d-4f01-8565-fdf93970f235 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.045882] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.046198] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81690304-77be-4f71-8e1b-2afb6eb586e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.052262] env[62204]: DEBUG oslo_vmware.api [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 962.052262] env[62204]: value = "task-1200173" [ 962.052262] env[62204]: _type = "Task" [ 962.052262] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.063482] env[62204]: DEBUG oslo_vmware.api [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200173, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.220175] env[62204]: INFO nova.compute.manager [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Rebuilding instance [ 962.224083] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4dba6fe-5e74-4d93-81c2-ea369f625777 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.241247] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dbaff8b-86da-426b-bf38-da9f24ce8dbe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.245522] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200172, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.246823] env[62204]: DEBUG nova.network.neutron [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance_info_cache with network_info: [{"id": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "address": "fa:16:3e:8d:31:3c", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7dcd5c1-45", "ovs_interfaceid": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.279727] env[62204]: INFO nova.compute.manager [-] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Took 1.74 seconds to deallocate network for instance. 
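
The "Acquiring lock ... by ...", "acquired ... waited Ns" and ""released" ... held Ns" triplets in the entries above come from oslo.concurrency's synchronized wrapper (the "inner" frames at lockutils.py:402/407/421) serializing per-instance compute operations such as do_terminate_instance. A minimal sketch of that pattern, assuming only the public oslo_concurrency.lockutils.synchronized decorator; the function name and lock name below are illustrative stand-ins, not Nova source:

    # sketch: per-instance serialization with oslo.concurrency (requires oslo.concurrency installed)
    from oslo_concurrency import lockutils

    @lockutils.synchronized('instance-2727dc46-98ed-435d-89ef-41bc20cda776')
    def do_terminate_instance():
        # The decorator's inner wrapper logs DEBUG lines like the
        # "acquired ... waited Ns" / "released ... held Ns" pairs above
        # while this body runs under the named lock.
        pass

    if __name__ == '__main__':
        do_terminate_instance()
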
[ 962.287862] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d611a176-6362-4e3f-a2a6-c1faadf71b36 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.292043] env[62204]: DEBUG nova.compute.manager [req-a780a669-e954-4a0b-ae31-fee0f28d71eb req-24e3992e-0469-405b-9983-662d48ea1075 service nova] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Received event network-vif-deleted-337d50b5-86ec-4cc3-92f3-86bd3fe37b54 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.302184] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c14244-0359-4061-9aa2-33c97bdf9ba9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.320520] env[62204]: DEBUG nova.compute.provider_tree [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.321859] env[62204]: DEBUG nova.compute.manager [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 962.322849] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c378ba-4364-4304-9adc-013c13256298 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.431699] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.447514] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.561939] env[62204]: DEBUG oslo_vmware.api [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200173, 'name': PowerOffVM_Task, 'duration_secs': 0.212555} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.562284] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 962.562474] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.562787] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e560f093-1d6f-420e-88ef-4dc3b0b4946a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.630252] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.630500] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.630877] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleting the datastore file [datastore1] 2727dc46-98ed-435d-89ef-41bc20cda776 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.631049] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77c9a570-708d-4dfb-a467-8d09d8cc174f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.638528] env[62204]: DEBUG oslo_vmware.api [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 962.638528] env[62204]: value = "task-1200175" [ 962.638528] env[62204]: _type = "Task" [ 962.638528] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.647341] env[62204]: DEBUG oslo_vmware.api [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200175, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.739771] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200172, 'name': ReconfigVM_Task, 'duration_secs': 0.749754} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.740286] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 0a383305-5b3b-4a7d-8834-d31e54eb4ba5/0a383305-5b3b-4a7d-8834-d31e54eb4ba5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.741036] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b04df79b-5715-46c5-8179-55f7959ce864 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.747800] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 962.747800] env[62204]: value = "task-1200176" [ 962.747800] env[62204]: _type = "Task" [ 962.747800] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.751167] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 962.751492] env[62204]: DEBUG nova.compute.manager [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Instance network_info: |[{"id": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "address": "fa:16:3e:8d:31:3c", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7dcd5c1-45", "ovs_interfaceid": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 962.751949] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:31:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.760977] env[62204]: DEBUG oslo.service.loopingcall [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 962.761647] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.761973] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b8f9299-2b8a-46f7-859e-a0d99a099a62 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.780133] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200176, 'name': Rename_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.785271] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.785271] env[62204]: value = "task-1200177" [ 962.785271] env[62204]: _type = "Task" [ 962.785271] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.793973] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200177, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.797176] env[62204]: DEBUG oslo_concurrency.lockutils [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.827656] env[62204]: DEBUG nova.scheduler.client.report [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 962.835968] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.836308] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dfcf7338-bfb6-4706-bb5f-12a68a2400d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.842683] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 962.842683] env[62204]: value = "task-1200178" [ 962.842683] env[62204]: _type = "Task" [ 962.842683] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.853375] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200178, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.934468] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.085047] env[62204]: INFO nova.compute.manager [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Unrescuing [ 963.085404] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.085404] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquired lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.085404] env[62204]: DEBUG nova.network.neutron [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 963.150584] env[62204]: DEBUG oslo_vmware.api [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317441} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.151062] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.151414] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.151667] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 963.151917] env[62204]: INFO nova.compute.manager [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Took 1.12 seconds to destroy the instance on the hypervisor. 
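
The repeated "Task: {'id': task-..., 'name': ...} progress is N%." entries followed by "completed successfully" are the output of oslo.vmware's wait_for_task/_poll_task loop. A self-contained sketch of that polling pattern, standard library only; fetch_task_info below is a hypothetical stand-in for a vCenter TaskInfo query, not the real oslo_vmware API:

    # sketch: poll a long-running task until it reaches a terminal state
    import time

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        # Mirrors the "progress is N%." / "completed successfully" lines above.
        while True:
            info = fetch_task_info()  # e.g. {'state': 'running', 'progress': 14}
            if info['state'] == 'success':
                print("Task completed successfully.")
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print("Task progress is %d%%." % info.get('progress', 0))
            time.sleep(poll_interval)

    # Toy usage: a fake task that succeeds on the third poll.
    _states = iter([{'state': 'running', 'progress': 0},
                    {'state': 'running', 'progress': 14},
                    {'state': 'success', 'progress': 100}])
    wait_for_task(lambda: next(_states), poll_interval=0)
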
[ 963.152284] env[62204]: DEBUG oslo.service.loopingcall [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 963.152527] env[62204]: DEBUG nova.compute.manager [-] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 963.152636] env[62204]: DEBUG nova.network.neutron [-] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 963.260984] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200176, 'name': Rename_Task, 'duration_secs': 0.202963} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.261142] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.261353] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc8656d8-92cc-4f4a-a641-f7f76d3643a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.267979] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 963.267979] env[62204]: value = "task-1200179" [ 963.267979] env[62204]: _type = "Task" [ 963.267979] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.277141] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200179, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.295159] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200177, 'name': CreateVM_Task, 'duration_secs': 0.419586} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.295364] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 963.296262] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.296459] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.296916] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 963.297253] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e06aa2d3-cd61-4ccf-a690-36b7ef5d9644 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.302132] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 963.302132] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260776c-5958-f88f-3ce4-cad5c4d5bd5f" [ 963.302132] env[62204]: _type = "Task" [ 963.302132] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.310835] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260776c-5958-f88f-3ce4-cad5c4d5bd5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.333026] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.429s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.333641] env[62204]: DEBUG nova.compute.manager [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 963.337077] env[62204]: DEBUG oslo_concurrency.lockutils [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.523s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.337340] env[62204]: DEBUG oslo_concurrency.lockutils [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.339619] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.639s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.341069] env[62204]: INFO nova.compute.claims [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 963.352365] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200178, 'name': PowerOffVM_Task, 'duration_secs': 0.184217} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.352532] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.352763] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.353561] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131fc934-f3ec-46c0-b3ec-b89a57351d6e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.361979] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 963.362354] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba9dcbc5-1403-497c-b756-6f4aa5db2414 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.382362] env[62204]: INFO nova.scheduler.client.report [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted allocations for instance 7b7032a8-8093-43fb-b2e2-c6308d96e819 [ 963.431923] env[62204]: DEBUG oslo_vmware.api [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200155, 'name': ReconfigVM_Task, 'duration_secs': 5.832194} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.432641] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.432908] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Reconfigured VM to detach interface {{(pid=62204) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 963.437238] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 963.437238] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 963.437427] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleting the datastore file [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 963.441311] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4eff6ce7-bd1d-40ce-9a48-28976c7d6b62 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.445112] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 963.445112] env[62204]: value = "task-1200181" [ 963.445112] env[62204]: _type = "Task" [ 963.445112] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.453417] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200181, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.780981] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200179, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.819035] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5260776c-5958-f88f-3ce4-cad5c4d5bd5f, 'name': SearchDatastore_Task, 'duration_secs': 0.010386} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.819035] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 963.819035] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 963.819035] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 963.819035] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.819035] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 963.819035] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-075ece67-f260-43eb-b25a-d6cd6392cbfd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.827877] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 963.828084] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 963.828954] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c59aed3d-5fde-44d9-ad4c-e59e782e893d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.835733] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 963.835733] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e614f8-e655-1613-5a08-10ea3b467a4c" [ 963.835733] env[62204]: _type = "Task" [ 963.835733] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.843567] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e614f8-e655-1613-5a08-10ea3b467a4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.845863] env[62204]: DEBUG nova.compute.utils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 963.849526] env[62204]: DEBUG nova.compute.manager [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 963.849706] env[62204]: DEBUG nova.network.neutron [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 963.890346] env[62204]: DEBUG oslo_concurrency.lockutils [None req-da73f3a1-e031-40d6-a144-2184fd514888 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "7b7032a8-8093-43fb-b2e2-c6308d96e819" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.194s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.897896] env[62204]: DEBUG nova.policy [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a2edea246e74173bbdb4365d0309cd7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be5f3f8b28ab4b63a2621b1fe1383af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 963.956212] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200181, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232717} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.956471] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 963.956692] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 963.956879] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 964.188197] env[62204]: DEBUG nova.network.neutron [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Successfully created port: ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.191733] env[62204]: DEBUG oslo_vmware.rw_handles [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b04ee8-5c14-1edf-7f43-492b78dade95/disk-0.vmdk. 
{{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 964.193314] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c82f5c-8003-4bed-b047-c65e04592c38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.197791] env[62204]: DEBUG nova.network.neutron [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updating instance_info_cache with network_info: [{"id": "7394819f-3d04-4685-a087-5a61976b658a", "address": "fa:16:3e:96:f4:0f", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7394819f-3d", "ovs_interfaceid": "7394819f-3d04-4685-a087-5a61976b658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.205226] env[62204]: DEBUG oslo_vmware.rw_handles [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b04ee8-5c14-1edf-7f43-492b78dade95/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 964.205226] env[62204]: ERROR oslo_vmware.rw_handles [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b04ee8-5c14-1edf-7f43-492b78dade95/disk-0.vmdk due to incomplete transfer. [ 964.205226] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-15a4cc5b-9d81-4fc3-a3da-bb62247e7c26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.213932] env[62204]: DEBUG oslo_vmware.rw_handles [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b04ee8-5c14-1edf-7f43-492b78dade95/disk-0.vmdk. 
{{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 964.213932] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Uploaded image 8af26ec1-5c36-47eb-bae1-70ed8da45531 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 964.219935] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 964.220826] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-6f951b87-7434-43e1-90ca-a77e5f2768cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.228708] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 964.228708] env[62204]: value = "task-1200182" [ 964.228708] env[62204]: _type = "Task" [ 964.228708] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.237683] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200182, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.278997] env[62204]: DEBUG oslo_vmware.api [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200179, 'name': PowerOnVM_Task, 'duration_secs': 0.553115} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.279333] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.279907] env[62204]: INFO nova.compute.manager [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Took 10.34 seconds to spawn the instance on the hypervisor. 
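
The network_info blobs cached above (for example the one logged for instance d97d792d-614f-42e3-8516-6c0a7cf15ad5) are JSON-serializable lists of VIF dicts. A minimal sketch, standard library only, of reading the commonly needed fields out of such a blob; the sample is hand-trimmed from the entry above and omits most of the structure Nova actually stores:

    # sketch: extract port id, device name, fixed/floating IPs and MTU from a network_info blob
    import json

    network_info = json.loads("""
    [{"id": "7394819f-3d04-4685-a087-5a61976b658a",
      "address": "fa:16:3e:96:f4:0f",
      "network": {"subnets": [{"cidr": "192.168.128.0/28",
                               "ips": [{"address": "192.168.128.7",
                                        "floating_ips": [{"address": "10.180.180.201"}]}]}],
                  "meta": {"mtu": 8950}},
      "type": "ovs",
      "devname": "tap7394819f-3d"}]
    """)

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
        floating = [fip["address"]
                    for subnet in vif["network"]["subnets"]
                    for ip in subnet["ips"]
                    for fip in ip.get("floating_ips", [])]
        print(vif["id"], vif["devname"], fixed, floating, vif["network"]["meta"]["mtu"])
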
[ 964.279907] env[62204]: DEBUG nova.compute.manager [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 964.280523] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154bbb26-38bc-4dc6-96af-c8e935157646 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.330704] env[62204]: DEBUG nova.compute.manager [req-8e0bb125-989e-4b33-8282-c2ca4d09c3b2 req-419fd65f-f2e1-4796-bc1e-72ee4869d1c8 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Received event network-vif-deleted-52d592a0-434a-4f17-8db6-39bf5d505429 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 964.330919] env[62204]: INFO nova.compute.manager [req-8e0bb125-989e-4b33-8282-c2ca4d09c3b2 req-419fd65f-f2e1-4796-bc1e-72ee4869d1c8 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Neutron deleted interface 52d592a0-434a-4f17-8db6-39bf5d505429; detaching it from the instance and deleting it from the info cache [ 964.331122] env[62204]: DEBUG nova.network.neutron [req-8e0bb125-989e-4b33-8282-c2ca4d09c3b2 req-419fd65f-f2e1-4796-bc1e-72ee4869d1c8 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.347963] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e614f8-e655-1613-5a08-10ea3b467a4c, 'name': SearchDatastore_Task, 'duration_secs': 0.024927} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.350158] env[62204]: DEBUG nova.compute.manager [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 964.352530] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cf10dbf-cb3c-4b73-b6ae-0d6a30c4697b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.361644] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 964.361644] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b0e9b8-1dff-db0f-07f4-80c8bacbe049" [ 964.361644] env[62204]: _type = "Task" [ 964.361644] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.370020] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b0e9b8-1dff-db0f-07f4-80c8bacbe049, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.618829] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39481c5f-85e5-41ac-8eae-02081a07fe0c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.626464] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153bc5bd-1f86-4d41-a122-042f65538b73 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.670445] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dfc43f8-94f5-47d1-ae23-8b7ad1d44f0c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.674679] env[62204]: DEBUG nova.network.neutron [-] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.679162] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e75a23-9900-4d71-9769-baa37625efd9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.696575] env[62204]: DEBUG nova.compute.provider_tree [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.705127] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Releasing lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.705798] env[62204]: DEBUG nova.objects.instance [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lazy-loading 'flavor' on Instance uuid d97d792d-614f-42e3-8516-6c0a7cf15ad5 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.740099] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200182, 'name': Destroy_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.795021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.795021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.795021] env[62204]: DEBUG nova.network.neutron [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 964.805794] env[62204]: INFO nova.compute.manager [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Took 24.37 seconds to build instance. [ 964.834116] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4451da6-cd7a-416f-9381-ab19484df54a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.843653] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc6be97-8e1e-4859-8661-5a99d81fd678 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.877541] env[62204]: DEBUG nova.compute.manager [req-8e0bb125-989e-4b33-8282-c2ca4d09c3b2 req-419fd65f-f2e1-4796-bc1e-72ee4869d1c8 service nova] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Detach interface failed, port_id=52d592a0-434a-4f17-8db6-39bf5d505429, reason: Instance 2727dc46-98ed-435d-89ef-41bc20cda776 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 964.883316] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b0e9b8-1dff-db0f-07f4-80c8bacbe049, 'name': SearchDatastore_Task, 'duration_secs': 0.038604} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.883584] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 964.883917] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] a93880fc-e517-4d83-98c1-9ce2405bf9d5/a93880fc-e517-4d83-98c1-9ce2405bf9d5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 964.884206] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5807029a-6e07-4b37-826c-88b889090c2d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.891103] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 964.891103] env[62204]: value = "task-1200183" [ 964.891103] env[62204]: _type = "Task" [ 964.891103] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.900747] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200183, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.998116] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 964.998630] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 964.998785] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.999140] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 964.999340] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.999519] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 964.999765] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 964.999939] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 965.000140] env[62204]: DEBUG nova.virt.hardware [None 
req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 965.000324] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 965.000530] env[62204]: DEBUG nova.virt.hardware [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 965.001458] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cb8f3d-69e3-4215-9d8c-1e487061d914 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.010626] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e2bd9a-c859-4144-8363-9ff542a4980b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.026886] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:85:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca6a6668-486c-47f3-bbb8-5902729c6304', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.035452] env[62204]: DEBUG oslo.service.loopingcall [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 965.036300] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.036633] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9fa256c-e404-40b8-8687-4c93e7ecc616 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.056917] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.056917] env[62204]: value = "task-1200184" [ 965.056917] env[62204]: _type = "Task" [ 965.056917] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.068508] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200184, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.105059] env[62204]: DEBUG nova.compute.manager [req-14abd17e-0f2e-4587-8d70-52dd8dbb661d req-8fb138c1-8401-408b-bb00-090dd387fb40 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 965.105268] env[62204]: DEBUG nova.compute.manager [req-14abd17e-0f2e-4587-8d70-52dd8dbb661d req-8fb138c1-8401-408b-bb00-090dd387fb40 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing instance network info cache due to event network-changed-55c5fd12-e601-44a8-ab4f-2fb4f263333e. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 965.106766] env[62204]: DEBUG oslo_concurrency.lockutils [req-14abd17e-0f2e-4587-8d70-52dd8dbb661d req-8fb138c1-8401-408b-bb00-090dd387fb40 service nova] Acquiring lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.179186] env[62204]: INFO nova.compute.manager [-] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Took 2.03 seconds to deallocate network for instance. [ 965.201794] env[62204]: DEBUG nova.scheduler.client.report [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 965.212061] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3d577a-7f40-458d-8550-763debd3017a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.240944] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.244951] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e5e6c5d-edc9-4a8a-98b9-979744d341a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.253121] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200182, 'name': Destroy_Task, 'duration_secs': 1.004295} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.253444] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Destroyed the VM [ 965.253754] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 965.255420] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-67da22e2-4aac-4d9f-a943-d5400086ab83 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.257927] env[62204]: DEBUG oslo_vmware.api [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 965.257927] env[62204]: value = "task-1200185" [ 965.257927] env[62204]: _type = "Task" [ 965.257927] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.272111] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 965.272111] env[62204]: value = "task-1200186" [ 965.272111] env[62204]: _type = "Task" [ 965.272111] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.272451] env[62204]: DEBUG oslo_vmware.api [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200185, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.281474] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200186, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.308335] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a85ee4cc-5fe3-47ac-bad7-8eeb56262dc1 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.879s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.363243] env[62204]: DEBUG nova.compute.manager [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 965.407968] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 965.408471] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 965.408734] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.409044] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 965.409273] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.409487] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 965.409776] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 965.410008] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 965.410309] env[62204]: DEBUG 
nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 965.410680] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 965.410772] env[62204]: DEBUG nova.virt.hardware [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 965.412696] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cae83c-6757-4500-8540-9aba960049c0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.423479] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200183, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.428278] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed82d443-fad2-40be-a5cd-4672df3a8d09 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.570373] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200184, 'name': CreateVM_Task, 'duration_secs': 0.45068} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.570566] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.571277] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.571476] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.571821] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 965.572122] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-373f410d-0fd6-42e2-a7ed-15110df58fa8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.577046] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 965.577046] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cd9133-0da6-00c5-9ac2-2fbd0d9a4918" [ 965.577046] env[62204]: _type = "Task" [ 965.577046] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.585334] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cd9133-0da6-00c5-9ac2-2fbd0d9a4918, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.687834] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.705722] env[62204]: INFO nova.network.neutron [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Port 5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 965.706126] env[62204]: DEBUG nova.network.neutron [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.707848] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.708334] env[62204]: DEBUG nova.compute.manager [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 965.710896] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.600s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.713608] env[62204]: INFO nova.compute.claims [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 965.770466] env[62204]: DEBUG oslo_vmware.api [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200185, 'name': PowerOffVM_Task, 'duration_secs': 0.265195} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.770601] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.775973] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfiguring VM instance instance-00000055 to detach disk 2002 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 965.776324] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d86e5cb-dea7-4b4e-afaf-8196bc7c7878 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.797339] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200186, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.798652] env[62204]: DEBUG oslo_vmware.api [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 965.798652] env[62204]: value = "task-1200187" [ 965.798652] env[62204]: _type = "Task" [ 965.798652] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.806276] env[62204]: DEBUG oslo_vmware.api [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200187, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.879177] env[62204]: DEBUG nova.network.neutron [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Successfully updated port: ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.902755] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.69951} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.903078] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] a93880fc-e517-4d83-98c1-9ce2405bf9d5/a93880fc-e517-4d83-98c1-9ce2405bf9d5.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.903533] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.903615] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f4dc57b-d7d4-4ad5-8b0e-cf0475b1c7dc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.905818] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "interface-7671c77f-3da8-4a41-a472-138c7bd23a92-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.906058] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-7671c77f-3da8-4a41-a472-138c7bd23a92-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.906561] env[62204]: DEBUG nova.objects.instance [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'flavor' on Instance uuid 7671c77f-3da8-4a41-a472-138c7bd23a92 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.912846] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee 
tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 965.912846] env[62204]: value = "task-1200188" [ 965.912846] env[62204]: _type = "Task" [ 965.912846] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.920223] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200188, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.087247] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52cd9133-0da6-00c5-9ac2-2fbd0d9a4918, 'name': SearchDatastore_Task, 'duration_secs': 0.013283} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.087561] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.087804] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.088057] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.088285] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.088397] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.088656] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85df2a3b-efe4-4403-b0dc-f7ae20ccfdfb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.105301] env[62204]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 966.105499] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 966.106238] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ee690dd-27ec-4eec-8e69-2d79d1224d4c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.111598] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 966.111598] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fef7e4-7549-5de1-9345-9f9f2c6fa7ac" [ 966.111598] env[62204]: _type = "Task" [ 966.111598] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.119103] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fef7e4-7549-5de1-9345-9f9f2c6fa7ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.212156] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.214413] env[62204]: DEBUG oslo_concurrency.lockutils [req-14abd17e-0f2e-4587-8d70-52dd8dbb661d req-8fb138c1-8401-408b-bb00-090dd387fb40 service nova] Acquired lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.214654] env[62204]: DEBUG nova.network.neutron [req-14abd17e-0f2e-4587-8d70-52dd8dbb661d req-8fb138c1-8401-408b-bb00-090dd387fb40 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Refreshing network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 966.216773] env[62204]: DEBUG nova.compute.utils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 966.220432] env[62204]: DEBUG nova.compute.manager [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 966.220663] env[62204]: DEBUG nova.network.neutron [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 966.263711] env[62204]: DEBUG nova.policy [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '878e00a270c145fdb01ed53adea00645', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4350e9a0bf0c45d3b37c8dc6bddbcfa9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 966.287906] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200186, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.308761] env[62204]: DEBUG oslo_vmware.api [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200187, 'name': ReconfigVM_Task, 'duration_secs': 0.242648} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.310348] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfigured VM instance instance-00000055 to detach disk 2002 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 966.310348] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.310348] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f4df23e-a9db-43ec-8681-1523a29e0540 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.320021] env[62204]: DEBUG oslo_vmware.api [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 966.320021] env[62204]: value = "task-1200189" [ 966.320021] env[62204]: _type = "Task" [ 966.320021] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.326202] env[62204]: DEBUG oslo_vmware.api [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200189, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.381353] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 966.381790] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.383070] env[62204]: DEBUG nova.network.neutron [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 966.422177] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200188, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066198} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.422763] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 966.423410] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db83938c-1cba-48fe-ae64-92a9c4bd022e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.447482] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] a93880fc-e517-4d83-98c1-9ce2405bf9d5/a93880fc-e517-4d83-98c1-9ce2405bf9d5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.450316] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7defc625-7d7e-4e4f-9ab6-c91d75670a77 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.470876] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 966.470876] env[62204]: value = "task-1200190" [ 966.470876] env[62204]: _type = "Task" [ 966.470876] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.479446] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200190, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.519494] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.519799] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.520030] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 966.520229] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.520405] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.523895] env[62204]: INFO nova.compute.manager [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Terminating instance [ 966.527964] env[62204]: DEBUG nova.compute.manager [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Start destroying the instance on the hypervisor. 
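The terminate path above is serialized per instance with oslo.concurrency locks, which is what the "Acquiring lock" / "acquired ... waited" / "released ... held" lines record. A minimal sketch of that pattern, using the instance UUID from the log as the lock name and a placeholder function body:

```python
# The lock name is the instance UUID being terminated above; the body is a placeholder.
from oslo_concurrency import lockutils

INSTANCE_UUID = "0a383305-5b3b-4a7d-8834-d31e54eb4ba5"

@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # Only one caller per lock name runs at a time, which keeps terminate,
    # build and event handling for the same instance from interleaving.
    pass

do_terminate_instance()
```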
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 966.528750] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 966.529145] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8f4a07-1ed2-4ca0-95ba-9acd5e908922 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.542044] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 966.542334] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ae2c4a4-91e4-470b-ab08-b5c368893130 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.549619] env[62204]: DEBUG nova.network.neutron [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Successfully created port: bde9750e-7c68-48e5-8cae-387ef9e45fdc {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.551764] env[62204]: DEBUG oslo_vmware.api [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 966.551764] env[62204]: value = "task-1200191" [ 966.551764] env[62204]: _type = "Task" [ 966.551764] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.560270] env[62204]: DEBUG oslo_vmware.api [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200191, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.588454] env[62204]: DEBUG nova.objects.instance [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'pci_requests' on Instance uuid 7671c77f-3da8-4a41-a472-138c7bd23a92 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.623441] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fef7e4-7549-5de1-9345-9f9f2c6fa7ac, 'name': SearchDatastore_Task, 'duration_secs': 0.035784} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.624305] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a13fea1-2780-43a1-9099-e822551cd72b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.630773] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 966.630773] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523924e6-e814-8b96-ec63-e4a71d30490c" [ 966.630773] env[62204]: _type = "Task" [ 966.630773] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.639056] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523924e6-e814-8b96-ec63-e4a71d30490c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.718028] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fe84a559-8d4f-4d1e-b128-7b84f386d4ee tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-274285e5-fc23-48b4-b0d6-5a67bc764d78-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.949s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.726269] env[62204]: DEBUG nova.compute.manager [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 966.793026] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200186, 'name': RemoveSnapshot_Task, 'duration_secs': 1.125083} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.793189] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 966.793573] env[62204]: DEBUG nova.compute.manager [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 966.794714] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba1b41eb-5e34-4d8c-ba38-26026e3cdd30 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.832846] env[62204]: DEBUG oslo_vmware.api [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200189, 'name': PowerOnVM_Task, 'duration_secs': 0.438004} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.837170] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.837586] env[62204]: DEBUG nova.compute.manager [None req-d978bcf0-9b27-4037-8c5f-45728414f613 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 966.839532] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041e6322-2e2d-4869-b37c-e7b5a7fc5094 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.942775] env[62204]: DEBUG nova.network.neutron [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 966.982302] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200190, 'name': ReconfigVM_Task, 'duration_secs': 0.457846} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.983723] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfigured VM instance instance-00000060 to attach disk [datastore2] a93880fc-e517-4d83-98c1-9ce2405bf9d5/a93880fc-e517-4d83-98c1-9ce2405bf9d5.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.983723] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0dd5ae2b-f2e0-4e3d-9888-a19d540610fa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.989632] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 966.989632] env[62204]: value = "task-1200192" [ 966.989632] env[62204]: _type = "Task" [ 966.989632] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.999928] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200192, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.033276] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83c4cf0-22df-4508-aa5d-4472117d9c03 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.043876] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94dc9e9a-d00e-4427-9e0c-4c7f9032203c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.088069] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266011ec-09ad-4874-9354-fdf5175d3161 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.091113] env[62204]: DEBUG nova.network.neutron [req-14abd17e-0f2e-4587-8d70-52dd8dbb661d req-8fb138c1-8401-408b-bb00-090dd387fb40 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updated VIF entry in instance network info cache for port 55c5fd12-e601-44a8-ab4f-2fb4f263333e. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 967.091477] env[62204]: DEBUG nova.network.neutron [req-14abd17e-0f2e-4587-8d70-52dd8dbb661d req-8fb138c1-8401-408b-bb00-090dd387fb40 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [{"id": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "address": "fa:16:3e:a5:8b:3a", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c5fd12-e6", "ovs_interfaceid": "55c5fd12-e601-44a8-ab4f-2fb4f263333e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.092993] env[62204]: DEBUG nova.objects.base [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Object Instance<7671c77f-3da8-4a41-a472-138c7bd23a92> lazy-loaded attributes: flavor,pci_requests {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 967.093196] env[62204]: DEBUG nova.network.neutron [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 967.102544] env[62204]: DEBUG oslo_vmware.api [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200191, 'name': PowerOffVM_Task, 'duration_secs': 0.162489} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.103712] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f057557d-486e-45f4-9383-3d8eb2a7cbf2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.107803] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 967.107955] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 967.109038] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67f08f16-4526-40fe-8e60-eb8b88957385 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.120965] env[62204]: DEBUG nova.compute.provider_tree [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 967.139979] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523924e6-e814-8b96-ec63-e4a71d30490c, 'name': SearchDatastore_Task, 'duration_secs': 0.009358} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.140296] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.140559] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb/ddef8de2-530e-4b94-aff1-6f7e410f44fb.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.140819] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-baeea1b1-8f63-4484-ad47-9c4d93192844 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.146394] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 967.146394] env[62204]: value = "task-1200194" [ 967.146394] env[62204]: _type = "Task" [ 967.146394] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.154433] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200194, 'name': CopyVirtualDisk_Task} progress is 0%. 
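The copy and extend steps above operate on "[datastore2] <dir>/<file>.vmdk" datastore paths. oslo.vmware ships a small helper object for composing such strings; a sketch with shortened placeholder names instead of the full image/instance UUID paths from the log:

```python
# Building the "[datastore2] <dir>/<file>.vmdk" form; IMAGE_UUID / INSTANCE_UUID
# stand in for the full UUIDs used in the log.
from oslo_vmware.objects.datastore import DatastorePath

src = DatastorePath("datastore2", "devstack-image-cache_base/IMAGE_UUID", "IMAGE_UUID.vmdk")
dst = DatastorePath("datastore2", "INSTANCE_UUID", "INSTANCE_UUID.vmdk")
print(src)   # [datastore2] devstack-image-cache_base/IMAGE_UUID/IMAGE_UUID.vmdk
print(dst)   # [datastore2] INSTANCE_UUID/INSTANCE_UUID.vmdk
```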
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.168552] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 967.168832] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 967.168954] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleting the datastore file [datastore2] 0a383305-5b3b-4a7d-8834-d31e54eb4ba5 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 967.169238] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07845b78-8c45-4a0b-8417-29f984337231 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.176018] env[62204]: DEBUG oslo_vmware.api [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 967.176018] env[62204]: value = "task-1200195" [ 967.176018] env[62204]: _type = "Task" [ 967.176018] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.184117] env[62204]: DEBUG oslo_vmware.api [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200195, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.314431] env[62204]: INFO nova.compute.manager [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Shelve offloading [ 967.316318] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 967.316641] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea02fb42-592e-4371-b777-142dd8513beb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.323846] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 967.323846] env[62204]: value = "task-1200196" [ 967.323846] env[62204]: _type = "Task" [ 967.323846] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.331924] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200196, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.463972] env[62204]: DEBUG nova.policy [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '478b22c814424575af79a8af808398a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '81dc15a8604e4900845b79c75cc5ef16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 967.491403] env[62204]: DEBUG nova.network.neutron [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance_info_cache with network_info: [{"id": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "address": "fa:16:3e:27:10:6c", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba57ac2e-ab", "ovs_interfaceid": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.499476] env[62204]: DEBUG nova.compute.manager [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received event network-changed-4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 967.499476] env[62204]: DEBUG nova.compute.manager [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing instance network info cache due to event network-changed-4243893b-2fda-4a71-94f3-332643bceb52. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 967.499673] env[62204]: DEBUG oslo_concurrency.lockutils [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] Acquiring lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.499913] env[62204]: DEBUG oslo_concurrency.lockutils [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] Acquired lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.500051] env[62204]: DEBUG nova.network.neutron [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing network info cache for port 4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 967.509677] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200192, 'name': Rename_Task, 'duration_secs': 0.278008} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.510022] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.510286] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab7e4223-425d-4774-9376-a30d1920967c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.520376] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 967.520376] env[62204]: value = "task-1200197" [ 967.520376] env[62204]: _type = "Task" [ 967.520376] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.529667] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200197, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.596243] env[62204]: DEBUG oslo_concurrency.lockutils [req-14abd17e-0f2e-4587-8d70-52dd8dbb661d req-8fb138c1-8401-408b-bb00-090dd387fb40 service nova] Releasing lock "refresh_cache-274285e5-fc23-48b4-b0d6-5a67bc764d78" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.623717] env[62204]: DEBUG nova.scheduler.client.report [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 967.656212] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497825} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.656513] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb/ddef8de2-530e-4b94-aff1-6f7e410f44fb.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 967.656808] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 967.657088] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c77bb2ea-5eea-402a-a5bd-79a53c08bb4e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.663580] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 967.663580] env[62204]: value = "task-1200198" [ 967.663580] env[62204]: _type = "Task" [ 967.663580] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.671215] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200198, 'name': ExtendVirtualDisk_Task} progress is 0%. 
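The inventory dict reported above is what Placement uses to size this provider: for each resource class the schedulable capacity is (total - reserved) * allocation_ratio. A quick check with the exact numbers from the report:

```python
# Schedulable capacity per resource class = (total - reserved) * allocation_ratio,
# using the inventory values reported for provider 92e8f362-5134-40c6-9a5c-0b8f64197972.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```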
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.684793] env[62204]: DEBUG oslo_vmware.api [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281459} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.685071] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 967.685279] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 967.685467] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 967.685671] env[62204]: INFO nova.compute.manager [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Took 1.16 seconds to destroy the instance on the hypervisor. [ 967.685980] env[62204]: DEBUG oslo.service.loopingcall [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 967.686220] env[62204]: DEBUG nova.compute.manager [-] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 967.686328] env[62204]: DEBUG nova.network.neutron [-] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 967.739373] env[62204]: DEBUG nova.compute.manager [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 967.776567] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 967.776878] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 967.777081] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 967.777340] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 967.777522] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 967.777704] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 967.777928] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 967.778106] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 967.778278] env[62204]: DEBUG nova.virt.hardware [None 
req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 967.778440] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 967.778618] env[62204]: DEBUG nova.virt.hardware [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 967.779823] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1338b9b-135f-408c-8b00-3d4a9b055fa7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.788377] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b00398-434c-4b33-9dcc-69eb4d555169 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.834393] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 967.834646] env[62204]: DEBUG nova.compute.manager [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 967.835467] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa504733-f5d0-471a-b117-d504ab92598e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.841629] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 967.841807] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.841983] env[62204]: DEBUG nova.network.neutron [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Building network info cache for instance {{(pid=62204) _get_instance_nw_info 
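The topology lines above show nova.virt.hardware enumerating every (sockets, cores, threads) split of the flavor's single vCPU that fits the 65536 limits, which leaves exactly one candidate. A rough approximation of that enumeration, not Nova's exact implementation (the real code also applies flavor/image preferences and ordering):

```python
# Every (sockets, cores, threads) triple whose product equals the vCPU count
# and which respects the maxima is a candidate topology.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    return [(s, c, t)
            for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                                   range(1, min(vcpus, max_cores) + 1),
                                   range(1, min(vcpus, max_threads) + 1))
            if s * c * t == vcpus]

print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology in the log
```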
/opt/stack/nova/nova/network/neutron.py:2010}} [ 967.998438] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 967.998789] env[62204]: DEBUG nova.compute.manager [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Instance network_info: |[{"id": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "address": "fa:16:3e:27:10:6c", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba57ac2e-ab", "ovs_interfaceid": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 967.999267] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:10:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 968.007297] env[62204]: DEBUG oslo.service.loopingcall [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
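The instance_info_cache payloads logged here and in the surrounding entries are lists of VIF dicts. A minimal reader for the fields most often needed when tracing a port through these logs, using the structure exactly as it appears above (values abbreviated from the 2178b629-4be6-473b-9a75-19efa234d442 entry):

```python
# One VIF entry, abbreviated from the instance network_info shown above.
network_info = [{
    "id": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c",
    "address": "fa:16:3e:27:10:6c",
    "devname": "tapba57ac2e-ab",
    "type": "ovs",
    "network": {"subnets": [{"cidr": "192.168.128.0/28",
                             "ips": [{"address": "192.168.128.9",
                                      "type": "fixed",
                                      "floating_ips": []}]}]},
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
```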
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 968.009983] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 968.010283] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-312ccbcb-46e8-45f9-b1b4-319ebdf7139c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.038958] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200197, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.040313] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 968.040313] env[62204]: value = "task-1200199" [ 968.040313] env[62204]: _type = "Task" [ 968.040313] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.051081] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200199, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.122899] env[62204]: DEBUG nova.network.neutron [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Successfully updated port: bde9750e-7c68-48e5-8cae-387ef9e45fdc {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 968.129402] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.131216] env[62204]: DEBUG nova.compute.manager [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 968.132922] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.685s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.133787] env[62204]: DEBUG nova.objects.instance [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lazy-loading 'pci_requests' on Instance uuid c0990e53-70c9-4536-b26a-bc00bd457c56 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.181638] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200198, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064061} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.185426] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.188038] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61460ea1-d246-4e03-8419-3568b4fcd9f1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.219495] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb/ddef8de2-530e-4b94-aff1-6f7e410f44fb.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.222484] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90254b4f-6253-4ec3-bb3c-15e610261e41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.244885] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 968.244885] env[62204]: value = "task-1200200" [ 968.244885] env[62204]: _type = "Task" [ 968.244885] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.254212] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200200, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.342861] env[62204]: DEBUG nova.network.neutron [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updated VIF entry in instance network info cache for port 4243893b-2fda-4a71-94f3-332643bceb52. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 968.343401] env[62204]: DEBUG nova.network.neutron [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updating instance_info_cache with network_info: [{"id": "4243893b-2fda-4a71-94f3-332643bceb52", "address": "fa:16:3e:5e:e9:38", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4243893b-2f", "ovs_interfaceid": "4243893b-2fda-4a71-94f3-332643bceb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.538572] env[62204]: DEBUG oslo_vmware.api [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200197, 'name': PowerOnVM_Task, 'duration_secs': 0.748605} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.539531] env[62204]: DEBUG nova.network.neutron [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Updating instance_info_cache with network_info: [{"id": "8d2819e6-83ce-46a3-80c6-ee04624e7556", "address": "fa:16:3e:a0:dd:91", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d2819e6-83", "ovs_interfaceid": "8d2819e6-83ce-46a3-80c6-ee04624e7556", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.540767] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.540925] env[62204]: INFO nova.compute.manager [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Took 10.13 seconds to spawn the instance on the hypervisor. [ 968.541178] env[62204]: DEBUG nova.compute.manager [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 968.542371] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae1b702-121e-41aa-a652-7df3632883a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.557084] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200199, 'name': CreateVM_Task, 'duration_secs': 0.344876} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.557446] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 968.558148] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.558322] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.558644] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 968.558893] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa2eea46-288b-499b-86dc-9cd1e99b91a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.563314] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 968.563314] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ba83ae-3478-78d3-0d78-df84f3dea06c" [ 968.563314] env[62204]: _type = "Task" [ 968.563314] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.571188] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ba83ae-3478-78d3-0d78-df84f3dea06c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.611113] env[62204]: DEBUG nova.network.neutron [-] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.625733] env[62204]: DEBUG nova.compute.manager [req-56c3f291-ee61-43c0-89c0-addb4c279cc2 req-11dedea5-6e49-4fb9-a453-b158860396a2 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Received event network-vif-deleted-a6d6e5fe-b756-45fa-9069-cec641c128e1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 968.625817] env[62204]: INFO nova.compute.manager [req-56c3f291-ee61-43c0-89c0-addb4c279cc2 req-11dedea5-6e49-4fb9-a453-b158860396a2 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Neutron deleted interface a6d6e5fe-b756-45fa-9069-cec641c128e1; detaching it from the instance and deleting it from the info cache [ 968.625985] env[62204]: DEBUG nova.network.neutron [req-56c3f291-ee61-43c0-89c0-addb4c279cc2 req-11dedea5-6e49-4fb9-a453-b158860396a2 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.628548] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "refresh_cache-aa336eda-d55a-4560-81bf-e4fcc6f4b485" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.628548] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquired lock "refresh_cache-aa336eda-d55a-4560-81bf-e4fcc6f4b485" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.628548] env[62204]: DEBUG nova.network.neutron [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 968.636885] env[62204]: DEBUG nova.compute.utils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 968.640033] env[62204]: DEBUG nova.objects.instance [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lazy-loading 'numa_topology' on Instance uuid c0990e53-70c9-4536-b26a-bc00bd457c56 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.640860] env[62204]: DEBUG nova.compute.manager [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 968.641480] env[62204]: DEBUG nova.network.neutron [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 968.685273] env[62204]: DEBUG nova.policy [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '370d4b8a24b84bf0a626d056c7758863', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fb9a24ef26c74781a2ad36e3430ce630', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 968.754959] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200200, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.846103] env[62204]: DEBUG oslo_concurrency.lockutils [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] Releasing lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 968.846418] env[62204]: DEBUG nova.compute.manager [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Received event network-vif-plugged-ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 968.846714] env[62204]: DEBUG oslo_concurrency.lockutils [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] Acquiring lock "2178b629-4be6-473b-9a75-19efa234d442-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.846942] env[62204]: DEBUG oslo_concurrency.lockutils [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] Lock "2178b629-4be6-473b-9a75-19efa234d442-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.847127] env[62204]: DEBUG oslo_concurrency.lockutils [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] Lock "2178b629-4be6-473b-9a75-19efa234d442-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.847304] env[62204]: DEBUG nova.compute.manager [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 
service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] No waiting events found dispatching network-vif-plugged-ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 968.847480] env[62204]: WARNING nova.compute.manager [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Received unexpected event network-vif-plugged-ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c for instance with vm_state building and task_state spawning. [ 968.847652] env[62204]: DEBUG nova.compute.manager [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Received event network-changed-ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 968.847813] env[62204]: DEBUG nova.compute.manager [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Refreshing instance network info cache due to event network-changed-ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 968.848138] env[62204]: DEBUG oslo_concurrency.lockutils [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] Acquiring lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 968.848138] env[62204]: DEBUG oslo_concurrency.lockutils [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] Acquired lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.848307] env[62204]: DEBUG nova.network.neutron [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Refreshing network info cache for port ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 969.009174] env[62204]: DEBUG nova.network.neutron [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Successfully created port: 6823acf4-9576-4220-9d4c-f7c640e04f3e {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 969.042611] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.072175] env[62204]: INFO nova.compute.manager [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Took 23.63 seconds to build instance. 
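The Acquiring/Acquired/Releasing lock lines around "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" in the records above are emitted by oslo.concurrency's lockutils (the lock/.../lockutils.py:310/313/331 tags), which Nova uses here to serialize access to one shared image-cache entry while the SearchDatastore_Task is polled. A minimal sketch of that locking pattern, assuming an in-process lock and with the critical-section body as a placeholder (this is not Nova's actual code path):

# Sketch of the oslo.concurrency lock pattern visible in the surrounding log lines.
# The lock name is copied from the log; the function and its body are illustrative.
from oslo_concurrency import lockutils

CACHE_LOCK = "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc"

def probe_image_cache():
    # lockutils.lock() is an in-process lock by default; entering and leaving
    # the context produces the "Acquiring"/"Acquired"/"Releasing" lock messages
    # seen above, so only one worker at a time inspects this cache entry.
    with lockutils.lock(CACHE_LOCK):
        # ... issue HostDatastoreBrowser.SearchDatastore_Task and wait for it
        # to complete (placeholder for the datastore-browse step logged above) ...
        pass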
[ 969.080503] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ba83ae-3478-78d3-0d78-df84f3dea06c, 'name': SearchDatastore_Task, 'duration_secs': 0.009627} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.080833] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 969.081110] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.081507] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 969.081588] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.082028] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.082028] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b5fc6df-63f0-4a56-b65c-df7decd5c242 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.092593] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.092872] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 969.093735] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e733af8-e71a-4acb-8fa2-549d30108e99 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.100151] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 969.100151] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524fdba8-cc7f-d97b-e7ef-3eceb9e23d1b" [ 969.100151] env[62204]: _type = "Task" [ 969.100151] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.108948] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524fdba8-cc7f-d97b-e7ef-3eceb9e23d1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.116403] env[62204]: INFO nova.compute.manager [-] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Took 1.43 seconds to deallocate network for instance. [ 969.132947] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6fd842d9-d16c-4b09-b605-e7347050415a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.144160] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57b6313-1cf1-4123-aa82-9024c0b9fdc6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.161080] env[62204]: DEBUG nova.compute.manager [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 969.163785] env[62204]: INFO nova.compute.claims [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.189048] env[62204]: DEBUG nova.compute.manager [req-56c3f291-ee61-43c0-89c0-addb4c279cc2 req-11dedea5-6e49-4fb9-a453-b158860396a2 service nova] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Detach interface failed, port_id=a6d6e5fe-b756-45fa-9069-cec641c128e1, reason: Instance 0a383305-5b3b-4a7d-8834-d31e54eb4ba5 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 969.255149] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200200, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.266081] env[62204]: DEBUG nova.network.neutron [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 969.577856] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 969.578128] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d277f861-cebb-4827-8bd8-090cccd3b5ee tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.142s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 969.578813] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9b3280-c001-4f15-954b-08fa602ae8a7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.588425] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.588690] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b32e2fb9-d35b-444e-a799-d52f382da867 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.609768] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524fdba8-cc7f-d97b-e7ef-3eceb9e23d1b, 'name': SearchDatastore_Task, 'duration_secs': 0.021324} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.610669] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59a5d530-156e-4ab6-93dd-3132efb2c679 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.616598] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 969.616598] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520830ff-9fc6-eb7a-77b9-db3c57af18a5" [ 969.616598] env[62204]: _type = "Task" [ 969.616598] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.625775] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.626114] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520830ff-9fc6-eb7a-77b9-db3c57af18a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.678670] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.678900] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.679095] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleting the datastore file [datastore1] d43dafa1-770f-4455-a3d8-9d08742b1fb6 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.679874] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0189066b-4173-4007-8885-07111476a2c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.682673] env[62204]: DEBUG nova.network.neutron [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updated VIF entry in instance network info cache for port ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 969.683012] env[62204]: DEBUG nova.network.neutron [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance_info_cache with network_info: [{"id": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "address": "fa:16:3e:27:10:6c", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba57ac2e-ab", "ovs_interfaceid": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.688743] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 969.688743] env[62204]: value = "task-1200202" [ 969.688743] env[62204]: _type = "Task" [ 969.688743] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.698682] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200202, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.755579] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200200, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.852569] env[62204]: DEBUG nova.network.neutron [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Updating instance_info_cache with network_info: [{"id": "bde9750e-7c68-48e5-8cae-387ef9e45fdc", "address": "fa:16:3e:9f:69:8e", "network": {"id": "ab0d7024-e74f-4172-9333-410e94a41063", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-122713931-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4350e9a0bf0c45d3b37c8dc6bddbcfa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbde9750e-7c", "ovs_interfaceid": "bde9750e-7c68-48e5-8cae-387ef9e45fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.127825] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520830ff-9fc6-eb7a-77b9-db3c57af18a5, 'name': SearchDatastore_Task, 'duration_secs': 0.014141} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.128192] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.128478] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2178b629-4be6-473b-9a75-19efa234d442/2178b629-4be6-473b-9a75-19efa234d442.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 970.128761] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-573f4ef6-9802-4729-a445-bfdb10d11413 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.135809] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 970.135809] env[62204]: value = "task-1200203" [ 970.135809] env[62204]: _type = "Task" [ 970.135809] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.143657] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200203, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.180107] env[62204]: DEBUG nova.compute.manager [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 970.185428] env[62204]: DEBUG oslo_concurrency.lockutils [req-1046d8cb-3e39-48f7-8600-bf019918d5be req-6e331885-45a2-492c-b51e-93fd103477a5 service nova] Releasing lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.198980] env[62204]: DEBUG oslo_vmware.api [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200202, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.48041} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.199721] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 970.199925] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 970.200218] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 970.217988] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.218271] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.218503] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 970.218719] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.218893] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.219361] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.219560] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.219725] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.219898] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.220086] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.220263] env[62204]: DEBUG nova.virt.hardware [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.221329] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c99bce8-ffff-4925-8917-a7985025ff6d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.229310] env[62204]: INFO nova.scheduler.client.report [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted allocations for instance d43dafa1-770f-4455-a3d8-9d08742b1fb6 [ 970.235191] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd8ff32-17e4-497b-80c6-6f141919bdb3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.245593] env[62204]: DEBUG nova.network.neutron [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Successfully updated port: 5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 970.273699] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200200, 'name': ReconfigVM_Task, 'duration_secs': 1.810644} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.273699] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Reconfigured VM instance instance-0000005e to attach disk [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb/ddef8de2-530e-4b94-aff1-6f7e410f44fb.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.273699] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67554f0c-8ce3-413f-bbf9-82995656b1bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.279285] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 970.279285] env[62204]: value = "task-1200204" [ 970.279285] env[62204]: _type = "Task" [ 970.279285] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.291653] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200204, 'name': Rename_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.355306] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Releasing lock "refresh_cache-aa336eda-d55a-4560-81bf-e4fcc6f4b485" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 970.355658] env[62204]: DEBUG nova.compute.manager [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Instance network_info: |[{"id": "bde9750e-7c68-48e5-8cae-387ef9e45fdc", "address": "fa:16:3e:9f:69:8e", "network": {"id": "ab0d7024-e74f-4172-9333-410e94a41063", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-122713931-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4350e9a0bf0c45d3b37c8dc6bddbcfa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbde9750e-7c", "ovs_interfaceid": "bde9750e-7c68-48e5-8cae-387ef9e45fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 970.356134] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:69:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8459aaf-d6a8-46fb-ad14-464ac3104695', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bde9750e-7c68-48e5-8cae-387ef9e45fdc', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.363843] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Creating folder: Project (4350e9a0bf0c45d3b37c8dc6bddbcfa9). Parent ref: group-v259933. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 970.364700] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dba494aa-c7dc-4392-9566-c64e3add20ee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.378707] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Created folder: Project (4350e9a0bf0c45d3b37c8dc6bddbcfa9) in parent group-v259933. [ 970.378967] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Creating folder: Instances. Parent ref: group-v260117. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 970.379262] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac03878e-0c57-4f2c-aa3c-1605e3cefcbf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.394159] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Created folder: Instances in parent group-v260117. [ 970.394159] env[62204]: DEBUG oslo.service.loopingcall [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 970.399276] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 970.401132] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c60171d-6fa0-4a5a-9e66-b4e7714bc5db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.421687] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 970.421687] env[62204]: value = "task-1200207" [ 970.421687] env[62204]: _type = "Task" [ 970.421687] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.433218] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200207, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.496807] env[62204]: DEBUG nova.compute.manager [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Received event network-vif-plugged-bde9750e-7c68-48e5-8cae-387ef9e45fdc {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 970.497124] env[62204]: DEBUG oslo_concurrency.lockutils [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] Acquiring lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.497441] env[62204]: DEBUG oslo_concurrency.lockutils [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.497749] env[62204]: DEBUG oslo_concurrency.lockutils [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.497943] env[62204]: DEBUG nova.compute.manager [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] No waiting events found dispatching network-vif-plugged-bde9750e-7c68-48e5-8cae-387ef9e45fdc {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 970.498178] env[62204]: WARNING nova.compute.manager [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Received unexpected event network-vif-plugged-bde9750e-7c68-48e5-8cae-387ef9e45fdc for instance with vm_state building and task_state spawning. 
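The "Updating instance_info_cache with network_info" payloads logged above (and refreshed again for port bde9750e-7c68-48e5-8cae-387ef9e45fdc just below) are plain lists of per-VIF dictionaries. A short, self-contained illustration of how the fields in those entries nest; the values are copied from the aa336eda-... entry above, and the variable names are ours, not Nova's:

# Shape of one VIF entry from the instance_info_cache records above.
# Values copied from the log; `vif` and `fixed_ips` are illustrative names.
import json

vif = json.loads("""
{"id": "bde9750e-7c68-48e5-8cae-387ef9e45fdc",
 "address": "fa:16:3e:9f:69:8e",
 "network": {"id": "ab0d7024-e74f-4172-9333-410e94a41063",
             "bridge": "br-int",
             "subnets": [{"cidr": "192.168.128.0/28",
                          "ips": [{"address": "192.168.128.9",
                                   "type": "fixed",
                                   "floating_ips": []}]}],
             "meta": {"mtu": 8950}},
 "type": "ovs",
 "devname": "tapbde9750e-7c"}
""")

# Collect the fixed IPs across all subnets of the VIF's network.
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
# -> fa:16:3e:9f:69:8e ['192.168.128.9'] 8950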
[ 970.498309] env[62204]: DEBUG nova.compute.manager [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Received event network-changed-bde9750e-7c68-48e5-8cae-387ef9e45fdc {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 970.498478] env[62204]: DEBUG nova.compute.manager [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Refreshing instance network info cache due to event network-changed-bde9750e-7c68-48e5-8cae-387ef9e45fdc. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 970.498666] env[62204]: DEBUG oslo_concurrency.lockutils [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] Acquiring lock "refresh_cache-aa336eda-d55a-4560-81bf-e4fcc6f4b485" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.498798] env[62204]: DEBUG oslo_concurrency.lockutils [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] Acquired lock "refresh_cache-aa336eda-d55a-4560-81bf-e4fcc6f4b485" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.498952] env[62204]: DEBUG nova.network.neutron [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Refreshing network info cache for port bde9750e-7c68-48e5-8cae-387ef9e45fdc {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 970.536234] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7d645e-c649-4d8c-b4ab-82bd576a9cd5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.543553] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f23cdf-6d97-472c-9184-520dd3f6647d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.575886] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79bcffe1-1839-40a6-b427-092edbbfa497 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.584202] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19da1c6-5f01-4dc0-bee5-15f6befe77f0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.599287] env[62204]: DEBUG nova.compute.provider_tree [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.649076] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200203, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.735873] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.748704] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.748704] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.748704] env[62204]: DEBUG nova.network.neutron [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 970.795248] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200204, 'name': Rename_Task, 'duration_secs': 0.210297} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.795248] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.795248] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b12310c5-f313-47a6-81a5-d3e55ba19d49 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.802027] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 970.802027] env[62204]: value = "task-1200208" [ 970.802027] env[62204]: _type = "Task" [ 970.802027] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.808912] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200208, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.855268] env[62204]: DEBUG nova.network.neutron [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Successfully updated port: 6823acf4-9576-4220-9d4c-f7c640e04f3e {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 970.932596] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200207, 'name': CreateVM_Task, 'duration_secs': 0.404147} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.932802] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 970.933519] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 970.933723] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.934091] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 970.934395] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41b28fb0-83bf-4729-975b-dcf2810bd5da {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.939283] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 970.939283] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ad7adf-d757-fbb9-fad7-69afaa04bf47" [ 970.939283] env[62204]: _type = "Task" [ 970.939283] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.948809] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ad7adf-d757-fbb9-fad7-69afaa04bf47, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.044214] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received event network-vif-plugged-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 971.044524] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Acquiring lock "7671c77f-3da8-4a41-a472-138c7bd23a92-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.044782] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Lock "7671c77f-3da8-4a41-a472-138c7bd23a92-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.044959] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Lock "7671c77f-3da8-4a41-a472-138c7bd23a92-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.045168] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] No waiting events found dispatching network-vif-plugged-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 971.045396] env[62204]: WARNING nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received unexpected event network-vif-plugged-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 for instance with vm_state active and task_state None. [ 971.045584] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Received event network-changed-7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 971.045739] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Refreshing instance network info cache due to event network-changed-7394819f-3d04-4685-a087-5a61976b658a. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 971.045932] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Acquiring lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.046082] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Acquired lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.046251] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Refreshing network info cache for port 7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 971.103164] env[62204]: DEBUG nova.scheduler.client.report [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 971.147150] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200203, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.634355} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.147491] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 2178b629-4be6-473b-9a75-19efa234d442/2178b629-4be6-473b-9a75-19efa234d442.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 971.147772] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.148078] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f1c5e54-cf95-43ba-a820-ec2e0e4fcaf2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.154875] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 971.154875] env[62204]: value = "task-1200209" [ 971.154875] env[62204]: _type = "Task" [ 971.154875] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.166357] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200209, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.231303] env[62204]: DEBUG nova.network.neutron [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Updated VIF entry in instance network info cache for port bde9750e-7c68-48e5-8cae-387ef9e45fdc. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 971.231786] env[62204]: DEBUG nova.network.neutron [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Updating instance_info_cache with network_info: [{"id": "bde9750e-7c68-48e5-8cae-387ef9e45fdc", "address": "fa:16:3e:9f:69:8e", "network": {"id": "ab0d7024-e74f-4172-9333-410e94a41063", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-122713931-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4350e9a0bf0c45d3b37c8dc6bddbcfa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbde9750e-7c", "ovs_interfaceid": "bde9750e-7c68-48e5-8cae-387ef9e45fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.283065] env[62204]: WARNING nova.network.neutron [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] 3b20dcf5-a239-493a-bd84-9815cabea48a already exists in list: networks containing: ['3b20dcf5-a239-493a-bd84-9815cabea48a']. ignoring it [ 971.311086] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200208, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.357885] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "refresh_cache-e42444b3-51c9-4d0f-9eee-c6f2e6631997" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.357885] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "refresh_cache-e42444b3-51c9-4d0f-9eee-c6f2e6631997" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.358130] env[62204]: DEBUG nova.network.neutron [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 971.449902] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ad7adf-d757-fbb9-fad7-69afaa04bf47, 'name': SearchDatastore_Task, 'duration_secs': 0.009042} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.450255] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.450499] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 971.450734] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 971.450884] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.451082] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 971.451356] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf37c826-251c-48df-8fc7-cf4e292c2ffa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.460419] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 971.460604] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 971.461304] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9cfb5e5-fb06-49c4-9b1a-654004aa9eb8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.466678] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 971.466678] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522648c9-2c24-d656-755d-2f068ebe982c" [ 971.466678] env[62204]: _type = "Task" [ 971.466678] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.474307] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522648c9-2c24-d656-755d-2f068ebe982c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.575200] env[62204]: DEBUG nova.network.neutron [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updating instance_info_cache with network_info: [{"id": "4243893b-2fda-4a71-94f3-332643bceb52", "address": "fa:16:3e:5e:e9:38", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4243893b-2f", "ovs_interfaceid": "4243893b-2fda-4a71-94f3-332643bceb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38", "address": "fa:16:3e:e6:ae:0d", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c5577ec-23", "ovs_interfaceid": "5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.610544] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.478s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.612677] env[62204]: DEBUG oslo_concurrency.lockutils [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.816s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.612919] env[62204]: DEBUG nova.objects.instance [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lazy-loading 'resources' on Instance uuid bd0f87d1-e53a-4433-afc6-6aea7e68d6f3 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.640402] env[62204]: INFO nova.network.neutron [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating port cb48dbbb-646f-445c-89d1-8c4a9e36de59 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 971.665153] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200209, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.143468} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.667491] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.668304] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184da22e-6f95-4f18-abb7-1e33d1a59072 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.692917] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 2178b629-4be6-473b-9a75-19efa234d442/2178b629-4be6-473b-9a75-19efa234d442.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.693932] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8edf9c0b-91a2-443f-91e3-fbfc65e3eaba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.713970] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 971.713970] env[62204]: value = "task-1200210" [ 971.713970] env[62204]: _type = "Task" [ 971.713970] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.722130] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200210, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.735109] env[62204]: DEBUG oslo_concurrency.lockutils [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] Releasing lock "refresh_cache-aa336eda-d55a-4560-81bf-e4fcc6f4b485" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 971.735518] env[62204]: DEBUG nova.compute.manager [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Received event network-vif-unplugged-8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 971.735812] env[62204]: DEBUG oslo_concurrency.lockutils [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] Acquiring lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.736102] env[62204]: DEBUG oslo_concurrency.lockutils [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.736577] env[62204]: DEBUG oslo_concurrency.lockutils [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.736713] env[62204]: DEBUG nova.compute.manager [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] No waiting events found dispatching network-vif-unplugged-8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 971.737014] env[62204]: WARNING nova.compute.manager [req-5da3696c-9f9d-4e71-8f9c-25bb83ce26c1 req-72bda66e-e0b4-477f-9021-1b7d10d727b9 service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Received unexpected event network-vif-unplugged-8d2819e6-83ce-46a3-80c6-ee04624e7556 for instance with vm_state shelved_offloaded and task_state None. [ 971.813897] env[62204]: DEBUG oslo_vmware.api [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200208, 'name': PowerOnVM_Task, 'duration_secs': 0.552433} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.814174] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.814384] env[62204]: DEBUG nova.compute.manager [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 971.815201] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a282d7-578f-4bc6-a285-4c5b3e08aa2c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.827588] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.860409] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updated VIF entry in instance network info cache for port 7394819f-3d04-4685-a087-5a61976b658a. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 971.860777] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updating instance_info_cache with network_info: [{"id": "7394819f-3d04-4685-a087-5a61976b658a", "address": "fa:16:3e:96:f4:0f", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7394819f-3d", "ovs_interfaceid": "7394819f-3d04-4685-a087-5a61976b658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.908969] env[62204]: DEBUG nova.network.neutron [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 971.978856] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522648c9-2c24-d656-755d-2f068ebe982c, 'name': SearchDatastore_Task, 'duration_secs': 0.008444} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.978856] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a44cd8c-61d7-4ba6-b644-2db416047bd0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.985030] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 971.985030] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c4dbb1-0ab6-45e6-0a3c-ffd8d31347a1" [ 971.985030] env[62204]: _type = "Task" [ 971.985030] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.993394] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c4dbb1-0ab6-45e6-0a3c-ffd8d31347a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.078481] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.079304] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.079499] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.080367] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0064b0-0c12-4110-bd20-fd9ac1c615d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.097358] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 972.097653] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 972.097831] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 972.098066] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 
tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 972.098195] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 972.098347] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 972.098556] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 972.098730] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 972.098884] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 972.099062] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 972.099243] env[62204]: DEBUG nova.virt.hardware [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 972.105249] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Reconfiguring VM to attach interface {{(pid=62204) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 972.106243] env[62204]: DEBUG nova.network.neutron [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Updating instance_info_cache with network_info: [{"id": "6823acf4-9576-4220-9d4c-f7c640e04f3e", "address": "fa:16:3e:db:4d:49", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6823acf4-95", "ovs_interfaceid": "6823acf4-9576-4220-9d4c-f7c640e04f3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.107416] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d02060e-6ac6-4792-91ea-e59b1600f073 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.120047] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "refresh_cache-e42444b3-51c9-4d0f-9eee-c6f2e6631997" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.120320] env[62204]: DEBUG nova.compute.manager [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Instance network_info: |[{"id": "6823acf4-9576-4220-9d4c-f7c640e04f3e", "address": "fa:16:3e:db:4d:49", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6823acf4-95", "ovs_interfaceid": "6823acf4-9576-4220-9d4c-f7c640e04f3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 972.122890] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:4d:49', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6823acf4-9576-4220-9d4c-f7c640e04f3e', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 972.129965] env[62204]: DEBUG oslo.service.loopingcall [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 972.131101] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 972.131377] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-144812f5-3765-4961-80b0-d13c84145a2b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.147128] env[62204]: DEBUG oslo_vmware.api [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 972.147128] env[62204]: value = "task-1200211" [ 972.147128] env[62204]: _type = "Task" [ 972.147128] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.154230] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 972.154230] env[62204]: value = "task-1200212" [ 972.154230] env[62204]: _type = "Task" [ 972.154230] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.160530] env[62204]: DEBUG oslo_vmware.api [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200211, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.165548] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200212, 'name': CreateVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.227616] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200210, 'name': ReconfigVM_Task, 'duration_secs': 0.271213} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.230390] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 2178b629-4be6-473b-9a75-19efa234d442/2178b629-4be6-473b-9a75-19efa234d442.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.231189] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28dea116-c2d3-4f0b-8737-d7ab97ea14d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.238187] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 972.238187] env[62204]: value = "task-1200213" [ 972.238187] env[62204]: _type = "Task" [ 972.238187] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.249251] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200213, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.331038] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.366299] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Releasing lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.366661] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Received event network-changed-7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 972.366875] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Refreshing instance network info cache due to event network-changed-7394819f-3d04-4685-a087-5a61976b658a. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 972.367116] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Acquiring lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.367267] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Acquired lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.367433] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Refreshing network info cache for port 7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 972.381200] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1db0534-c70a-4dd7-9131-9439eee57870 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.389613] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabf8626-ec9d-46d2-a971-7ec2a6f1f4a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.423101] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce37a559-e424-404f-b739-c279a6dcb675 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.430491] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6def1359-63eb-4fbb-94f9-03dc8889faad {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.444103] env[62204]: DEBUG nova.compute.provider_tree [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.494706] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c4dbb1-0ab6-45e6-0a3c-ffd8d31347a1, 'name': SearchDatastore_Task, 'duration_secs': 0.008651} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.494847] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 972.495090] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] aa336eda-d55a-4560-81bf-e4fcc6f4b485/aa336eda-d55a-4560-81bf-e4fcc6f4b485.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 972.495356] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd1ef190-4bab-40dc-a510-b9c8d4b1a8cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.502181] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 972.502181] env[62204]: value = "task-1200214" [ 972.502181] env[62204]: _type = "Task" [ 972.502181] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.510437] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200214, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.572415] env[62204]: DEBUG nova.compute.manager [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Received event network-changed-8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 972.572415] env[62204]: DEBUG nova.compute.manager [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Refreshing instance network info cache due to event network-changed-8d2819e6-83ce-46a3-80c6-ee04624e7556. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 972.572657] env[62204]: DEBUG oslo_concurrency.lockutils [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] Acquiring lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.572892] env[62204]: DEBUG oslo_concurrency.lockutils [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] Acquired lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.573143] env[62204]: DEBUG nova.network.neutron [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Refreshing network info cache for port 8d2819e6-83ce-46a3-80c6-ee04624e7556 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 972.658510] env[62204]: DEBUG oslo_vmware.api [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200211, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.666861] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200212, 'name': CreateVM_Task, 'duration_secs': 0.496122} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.667055] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 972.667785] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 972.667967] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.668315] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 972.668572] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-283729d7-a253-4fc9-a0d9-ba731598a385 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.672860] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 
tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 972.672860] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ffe90d-fc37-4d33-2d99-5fa907f50eb7" [ 972.672860] env[62204]: _type = "Task" [ 972.672860] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.681512] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ffe90d-fc37-4d33-2d99-5fa907f50eb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.748402] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200213, 'name': Rename_Task, 'duration_secs': 0.217114} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.748699] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 972.748972] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cf8861a-8ef2-45ff-9286-2c038b5824b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.757274] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 972.757274] env[62204]: value = "task-1200215" [ 972.757274] env[62204]: _type = "Task" [ 972.757274] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.765465] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200215, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.948423] env[62204]: DEBUG nova.scheduler.client.report [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 973.013781] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200214, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.158422] env[62204]: DEBUG oslo_vmware.api [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200211, 'name': ReconfigVM_Task, 'duration_secs': 0.630114} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.159054] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.159857] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Reconfigured VM to attach interface {{(pid=62204) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 973.171486] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.174573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.174573] env[62204]: DEBUG nova.network.neutron [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 973.188534] env[62204]: DEBUG oslo_vmware.api [None 
req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52ffe90d-fc37-4d33-2d99-5fa907f50eb7, 'name': SearchDatastore_Task, 'duration_secs': 0.010502} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.188928] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.189217] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 973.189486] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.189656] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.189864] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 973.190152] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3607ba41-c531-4720-b514-1e8780d09286 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.199497] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 973.199881] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 973.200623] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d63eccca-137e-461f-8393-209834581275 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.206747] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 973.206747] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5258c354-43b7-8d7d-0538-3e13c51fe506" [ 973.206747] env[62204]: _type = "Task" [ 973.206747] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.214306] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5258c354-43b7-8d7d-0538-3e13c51fe506, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.266805] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200215, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.324854] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updated VIF entry in instance network info cache for port 7394819f-3d04-4685-a087-5a61976b658a. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 973.325309] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updating instance_info_cache with network_info: [{"id": "7394819f-3d04-4685-a087-5a61976b658a", "address": "fa:16:3e:96:f4:0f", "network": {"id": "aa231f19-a4bc-4d7f-9be2-a2632a4f4af4", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-164334005-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.201", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43b28641aa01450b8ad70dc121642f79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7394819f-3d", "ovs_interfaceid": "7394819f-3d04-4685-a087-5a61976b658a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.450120] env[62204]: DEBUG nova.network.neutron [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Updated VIF entry in instance network info cache for port 8d2819e6-83ce-46a3-80c6-ee04624e7556. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 973.450512] env[62204]: DEBUG nova.network.neutron [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Updating instance_info_cache with network_info: [{"id": "8d2819e6-83ce-46a3-80c6-ee04624e7556", "address": "fa:16:3e:a0:dd:91", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": null, "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8d2819e6-83", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.453758] env[62204]: DEBUG oslo_concurrency.lockutils [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.841s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.456851] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.768s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.456851] env[62204]: DEBUG nova.objects.instance [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'resources' on Instance uuid 2727dc46-98ed-435d-89ef-41bc20cda776 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 973.473025] env[62204]: INFO nova.scheduler.client.report [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleted allocations for instance bd0f87d1-e53a-4433-afc6-6aea7e68d6f3 [ 973.514016] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200214, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558007} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.514293] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] aa336eda-d55a-4560-81bf-e4fcc6f4b485/aa336eda-d55a-4560-81bf-e4fcc6f4b485.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 973.514534] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 973.514809] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6defccf1-6fda-4a5e-a195-ea0abf9c68cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.520915] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 973.520915] env[62204]: value = "task-1200216" [ 973.520915] env[62204]: _type = "Task" [ 973.520915] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.528899] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200216, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.669678] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1ca07ec0-8bd6-4fc7-8bce-3e9fd90f7b54 tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-7671c77f-3da8-4a41-a472-138c7bd23a92-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.763s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.718182] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5258c354-43b7-8d7d-0538-3e13c51fe506, 'name': SearchDatastore_Task, 'duration_secs': 0.008398} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.718993] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab711343-5ff9-4a0c-af76-97045ea301a2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.724204] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 973.724204] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520ad597-ae58-6e7a-3ddd-160edd086946" [ 973.724204] env[62204]: _type = "Task" [ 973.724204] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.731692] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520ad597-ae58-6e7a-3ddd-160edd086946, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.768554] env[62204]: DEBUG oslo_vmware.api [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200215, 'name': PowerOnVM_Task, 'duration_secs': 0.611689} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.769048] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 973.769682] env[62204]: INFO nova.compute.manager [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Took 8.41 seconds to spawn the instance on the hypervisor. 
[ 973.769682] env[62204]: DEBUG nova.compute.manager [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 973.770346] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46cdd2f9-df41-4354-9822-f27b029c9d49 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.829179] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Releasing lock "refresh_cache-d97d792d-614f-42e3-8516-6c0a7cf15ad5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.829473] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received event network-changed-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 973.829647] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing instance network info cache due to event network-changed-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 973.829903] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Acquiring lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.830070] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Acquired lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.830445] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Refreshing network info cache for port 5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 973.953280] env[62204]: DEBUG oslo_concurrency.lockutils [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] Releasing lock "refresh_cache-d43dafa1-770f-4455-a3d8-9d08742b1fb6" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 973.953565] env[62204]: DEBUG nova.compute.manager [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Received event network-vif-plugged-6823acf4-9576-4220-9d4c-f7c640e04f3e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 973.953764] env[62204]: DEBUG oslo_concurrency.lockutils [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] 
Acquiring lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.953974] env[62204]: DEBUG oslo_concurrency.lockutils [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] Lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.954174] env[62204]: DEBUG oslo_concurrency.lockutils [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] Lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.954347] env[62204]: DEBUG nova.compute.manager [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] No waiting events found dispatching network-vif-plugged-6823acf4-9576-4220-9d4c-f7c640e04f3e {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 973.954537] env[62204]: WARNING nova.compute.manager [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Received unexpected event network-vif-plugged-6823acf4-9576-4220-9d4c-f7c640e04f3e for instance with vm_state building and task_state spawning. [ 973.954720] env[62204]: DEBUG nova.compute.manager [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Received event network-changed-6823acf4-9576-4220-9d4c-f7c640e04f3e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 973.954882] env[62204]: DEBUG nova.compute.manager [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Refreshing instance network info cache due to event network-changed-6823acf4-9576-4220-9d4c-f7c640e04f3e. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 973.955086] env[62204]: DEBUG oslo_concurrency.lockutils [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] Acquiring lock "refresh_cache-e42444b3-51c9-4d0f-9eee-c6f2e6631997" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 973.955235] env[62204]: DEBUG oslo_concurrency.lockutils [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] Acquired lock "refresh_cache-e42444b3-51c9-4d0f-9eee-c6f2e6631997" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.955397] env[62204]: DEBUG nova.network.neutron [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Refreshing network info cache for port 6823acf4-9576-4220-9d4c-f7c640e04f3e {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 973.981847] env[62204]: DEBUG oslo_concurrency.lockutils [None req-041f6efd-c55b-4abe-ade3-4bed30d95292 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "bd0f87d1-e53a-4433-afc6-6aea7e68d6f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.579s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.994778] env[62204]: DEBUG nova.network.neutron [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating instance_info_cache with network_info: [{"id": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "address": "fa:16:3e:38:96:d6", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb48dbbb-64", "ovs_interfaceid": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.032240] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200216, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.255007} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.032568] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 974.033378] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b310c0-39db-4057-842b-3a5771582413 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.055635] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] aa336eda-d55a-4560-81bf-e4fcc6f4b485/aa336eda-d55a-4560-81bf-e4fcc6f4b485.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.058250] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17a2fb30-9167-41f0-bf64-85fc4bfc30e2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.080928] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 974.080928] env[62204]: value = "task-1200217" [ 974.080928] env[62204]: _type = "Task" [ 974.080928] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.093617] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200217, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.224032] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8481b3a3-8396-44ef-b8b6-f736038a7bc2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.237756] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520ad597-ae58-6e7a-3ddd-160edd086946, 'name': SearchDatastore_Task, 'duration_secs': 0.039771} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.238821] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1cf667-041c-4d97-88dc-f98153ac3b08 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.242111] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.242475] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] e42444b3-51c9-4d0f-9eee-c6f2e6631997/e42444b3-51c9-4d0f-9eee-c6f2e6631997.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 974.242802] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1d11964-1639-4718-814b-02ae8156a9a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.275952] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e098d99a-fdef-47a8-a085-8ded1794f50b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.278931] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 974.278931] env[62204]: value = "task-1200218" [ 974.278931] env[62204]: _type = "Task" [ 974.278931] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.289987] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c94674-d00c-47d9-8fee-682227036022 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.299010] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200218, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.299558] env[62204]: INFO nova.compute.manager [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Took 24.57 seconds to build instance. 
[ 974.310347] env[62204]: DEBUG nova.compute.provider_tree [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.498083] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 974.527999] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='6c0c749fc59a1ecb9358d78894429ece',container_format='bare',created_at=2024-10-08T23:43:17Z,direct_url=,disk_format='vmdk',id=64aeea2b-e127-4ab2-abff-027b5881ee9a,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-452598165-shelved',owner='7f1dbef99d9946d58fbe59f2850f6c63',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2024-10-08T23:43:33Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 974.528244] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 974.528428] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 974.528601] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 974.528759] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 974.528917] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 974.529172] env[62204]: DEBUG nova.virt.hardware [None 
req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 974.529360] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 974.529582] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 974.529769] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 974.529952] env[62204]: DEBUG nova.virt.hardware [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 974.530950] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b414eb-928d-4014-ac3e-081f0c2cfa3d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.539119] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55907e2-c0a9-42fd-859e-26de1aab3699 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.558070] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:96:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '255460d5-71d4-4bfd-87f1-acc10085db7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb48dbbb-646f-445c-89d1-8c4a9e36de59', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 974.565581] env[62204]: DEBUG oslo.service.loopingcall [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 974.565924] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 974.566188] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-374bf721-dedf-4088-81cf-405faa3d8671 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.585895] env[62204]: DEBUG oslo_concurrency.lockutils [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.586225] env[62204]: DEBUG oslo_concurrency.lockutils [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.586444] env[62204]: DEBUG oslo_concurrency.lockutils [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.586663] env[62204]: DEBUG oslo_concurrency.lockutils [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.586844] env[62204]: DEBUG oslo_concurrency.lockutils [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.589187] env[62204]: INFO nova.compute.manager [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Terminating instance [ 974.595821] env[62204]: DEBUG nova.compute.manager [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 974.596067] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 974.597212] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d5b2a4-2288-44f9-824b-edad92ba75aa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.602025] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 974.602025] env[62204]: value = "task-1200219" [ 974.602025] env[62204]: _type = "Task" [ 974.602025] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.610760] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 974.611100] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200217, 'name': ReconfigVM_Task, 'duration_secs': 0.45302} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.611709] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3da69216-3335-4626-bd21-49a7af01d79e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.613418] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Reconfigured VM instance instance-00000062 to attach disk [datastore1] aa336eda-d55a-4560-81bf-e4fcc6f4b485/aa336eda-d55a-4560-81bf-e4fcc6f4b485.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 974.617237] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-faade447-7918-41a2-9512-e9470d7df2d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.618880] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200219, 'name': CreateVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.623935] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 974.623935] env[62204]: value = "task-1200221" [ 974.623935] env[62204]: _type = "Task" [ 974.623935] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.628619] env[62204]: DEBUG oslo_vmware.api [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 974.628619] env[62204]: value = "task-1200220" [ 974.628619] env[62204]: _type = "Task" [ 974.628619] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.636292] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200221, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.644197] env[62204]: DEBUG oslo_vmware.api [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200220, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.732924] env[62204]: DEBUG nova.compute.manager [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received event network-vif-plugged-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 974.733200] env[62204]: DEBUG oslo_concurrency.lockutils [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] Acquiring lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 974.733437] env[62204]: DEBUG oslo_concurrency.lockutils [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.733644] env[62204]: DEBUG oslo_concurrency.lockutils [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.733868] env[62204]: DEBUG nova.compute.manager [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] No waiting events found dispatching network-vif-plugged-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 974.734109] env[62204]: WARNING nova.compute.manager [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received unexpected event network-vif-plugged-cb48dbbb-646f-445c-89d1-8c4a9e36de59 for instance with vm_state 
shelved_offloaded and task_state spawning. [ 974.734309] env[62204]: DEBUG nova.compute.manager [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received event network-changed-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 974.734547] env[62204]: DEBUG nova.compute.manager [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Refreshing instance network info cache due to event network-changed-cb48dbbb-646f-445c-89d1-8c4a9e36de59. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 974.734757] env[62204]: DEBUG oslo_concurrency.lockutils [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] Acquiring lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.734914] env[62204]: DEBUG oslo_concurrency.lockutils [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] Acquired lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.735132] env[62204]: DEBUG nova.network.neutron [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Refreshing network info cache for port cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 974.791985] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200218, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.801962] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e2db0d10-f7f0-4550-94db-414862617b08 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.093s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.813400] env[62204]: DEBUG nova.scheduler.client.report [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 974.937643] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updated VIF entry in instance network info cache for port 5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 974.937959] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updating instance_info_cache with network_info: [{"id": "4243893b-2fda-4a71-94f3-332643bceb52", "address": "fa:16:3e:5e:e9:38", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4243893b-2f", "ovs_interfaceid": "4243893b-2fda-4a71-94f3-332643bceb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38", "address": "fa:16:3e:e6:ae:0d", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c5577ec-23", "ovs_interfaceid": "5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.066457] env[62204]: DEBUG nova.network.neutron [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Updated VIF entry in instance network info cache for port 6823acf4-9576-4220-9d4c-f7c640e04f3e. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 975.067204] env[62204]: DEBUG nova.network.neutron [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Updating instance_info_cache with network_info: [{"id": "6823acf4-9576-4220-9d4c-f7c640e04f3e", "address": "fa:16:3e:db:4d:49", "network": {"id": "79f56bec-5c1d-44a1-837f-64c3d5e58e13", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1924183901-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fb9a24ef26c74781a2ad36e3430ce630", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffc811e4-0e4e-4748-8c7e-b3f14ccbd42d", "external-id": "nsx-vlan-transportzone-404", "segmentation_id": 404, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6823acf4-95", "ovs_interfaceid": "6823acf4-9576-4220-9d4c-f7c640e04f3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.113116] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200219, 'name': CreateVM_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.132692] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200221, 'name': Rename_Task, 'duration_secs': 0.189711} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.138159] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 975.138933] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4523845-c0b1-4513-9ebf-d0b63bbd7de0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.145754] env[62204]: DEBUG oslo_vmware.api [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200220, 'name': PowerOffVM_Task, 'duration_secs': 0.411566} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.147064] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.147259] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.147621] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 975.147621] env[62204]: value = "task-1200222" [ 975.147621] env[62204]: _type = "Task" [ 975.147621] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.147776] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da49183c-d6b2-4f63-8042-31cfd6634ede {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.157323] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200222, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.224016] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 975.224277] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 975.224497] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleting the datastore file [datastore2] ddef8de2-530e-4b94-aff1-6f7e410f44fb {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.224782] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1eabe35b-2122-4499-843b-69b9e387fe85 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.231148] env[62204]: DEBUG oslo_vmware.api [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 975.231148] env[62204]: value = "task-1200224" [ 975.231148] env[62204]: _type = "Task" [ 975.231148] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.241343] env[62204]: DEBUG oslo_vmware.api [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200224, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.288961] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200218, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637268} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.289336] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] e42444b3-51c9-4d0f-9eee-c6f2e6631997/e42444b3-51c9-4d0f-9eee-c6f2e6631997.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 975.289580] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 975.291998] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a80f44ba-3bce-43fd-aae0-eb282e439ab9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.298442] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 975.298442] env[62204]: value = "task-1200225" [ 975.298442] env[62204]: _type = "Task" [ 975.298442] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.306459] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200225, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.327629] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.871s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.330156] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.705s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.330403] env[62204]: DEBUG nova.objects.instance [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lazy-loading 'resources' on Instance uuid 0a383305-5b3b-4a7d-8834-d31e54eb4ba5 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.352219] env[62204]: INFO nova.scheduler.client.report [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted allocations for instance 2727dc46-98ed-435d-89ef-41bc20cda776 [ 975.417099] env[62204]: DEBUG oslo_concurrency.lockutils [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "interface-7671c77f-3da8-4a41-a472-138c7bd23a92-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.417379] env[62204]: DEBUG oslo_concurrency.lockutils [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-7671c77f-3da8-4a41-a472-138c7bd23a92-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.440669] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Releasing lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.442031] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Received event network-changed-a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.442031] env[62204]: DEBUG nova.compute.manager [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Refreshing instance network info cache due to event 
network-changed-a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 975.442031] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Acquiring lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.442031] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Acquired lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.442031] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Refreshing network info cache for port a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 975.571814] env[62204]: DEBUG oslo_concurrency.lockutils [req-37e4413d-bfe0-40e7-815c-ed444fca0c16 req-1a19bce2-9599-4dee-9d3c-500c3e8a9930 service nova] Releasing lock "refresh_cache-e42444b3-51c9-4d0f-9eee-c6f2e6631997" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 975.609074] env[62204]: DEBUG nova.network.neutron [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updated VIF entry in instance network info cache for port cb48dbbb-646f-445c-89d1-8c4a9e36de59. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 975.609674] env[62204]: DEBUG nova.network.neutron [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating instance_info_cache with network_info: [{"id": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "address": "fa:16:3e:38:96:d6", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb48dbbb-64", "ovs_interfaceid": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.619067] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200219, 'name': CreateVM_Task, 'duration_secs': 0.727316} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.619067] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 975.619603] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.619778] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.620159] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 975.620421] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d9cd2d4-b011-4af4-9385-ab7fa6d3bda3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.625732] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 975.625732] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b50a6b-ae20-eaa6-04da-9a35bb75a489" [ 975.625732] env[62204]: _type = "Task" [ 975.625732] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.635709] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52b50a6b-ae20-eaa6-04da-9a35bb75a489, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.659045] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200222, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.742017] env[62204]: DEBUG oslo_vmware.api [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280109} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.742381] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 975.742669] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 975.742958] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 975.743489] env[62204]: INFO nova.compute.manager [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 975.743657] env[62204]: DEBUG oslo.service.loopingcall [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 975.743938] env[62204]: DEBUG nova.compute.manager [-] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 975.744058] env[62204]: DEBUG nova.network.neutron [-] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 975.808033] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200225, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115375} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.808197] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 975.808886] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc84c62-9663-4023-b3a9-fde3e3bbdcb3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.830744] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] e42444b3-51c9-4d0f-9eee-c6f2e6631997/e42444b3-51c9-4d0f-9eee-c6f2e6631997.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 975.831085] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ecd5d96-6b06-4d4f-96e7-11051edea812 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.855256] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 975.855256] env[62204]: value = "task-1200226" [ 975.855256] env[62204]: _type = "Task" [ 975.855256] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.861409] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67cad8cd-5cdb-4326-9def-6cf2593d7bd9 tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "2727dc46-98ed-435d-89ef-41bc20cda776" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.834s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.866608] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200226, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.920998] env[62204]: DEBUG oslo_concurrency.lockutils [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.920998] env[62204]: DEBUG oslo_concurrency.lockutils [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.922313] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb902b6b-a33a-46dc-8e46-cd687cc3bb09 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.947065] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026957ce-68b8-406f-8984-892aa5240dff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.978682] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Reconfiguring VM to detach interface {{(pid=62204) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 975.982600] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-beaec083-ba72-4436-9fd4-fc9ef37d4593 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.004680] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 976.004680] env[62204]: value = "task-1200227" [ 976.004680] env[62204]: _type = "Task" [ 976.004680] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.017420] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.075438] env[62204]: DEBUG nova.compute.manager [req-643fe61d-5a2c-41df-a4a7-ca6db8d9ee66 req-667116e8-a4ec-4aed-9c82-e2dbfca3ae1f service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Received event network-vif-deleted-ca6a6668-486c-47f3-bbb8-5902729c6304 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 976.075714] env[62204]: INFO nova.compute.manager [req-643fe61d-5a2c-41df-a4a7-ca6db8d9ee66 req-667116e8-a4ec-4aed-9c82-e2dbfca3ae1f service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Neutron deleted interface ca6a6668-486c-47f3-bbb8-5902729c6304; detaching it from the instance and deleting it from the info cache [ 976.075959] env[62204]: DEBUG nova.network.neutron [req-643fe61d-5a2c-41df-a4a7-ca6db8d9ee66 req-667116e8-a4ec-4aed-9c82-e2dbfca3ae1f service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.113672] env[62204]: DEBUG oslo_concurrency.lockutils [req-9ea1149b-6d13-4ae6-8574-c0365b13adf2 req-a49b3633-6191-4108-90b8-675576f37503 service nova] Releasing lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.140878] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.141129] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Processing image 64aeea2b-e127-4ab2-abff-027b5881ee9a {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 976.141423] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a/64aeea2b-e127-4ab2-abff-027b5881ee9a.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 976.141649] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a/64aeea2b-e127-4ab2-abff-027b5881ee9a.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.141866] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 976.142390] env[62204]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b26ee0c5-d9bf-46ee-b1a8-0fa54d85f5f2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.151755] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 976.151966] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 976.158380] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e745cec0-5355-4899-81b3-6dc5dd782c60 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.167249] env[62204]: DEBUG oslo_vmware.api [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200222, 'name': PowerOnVM_Task, 'duration_secs': 0.923721} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.168110] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.168360] env[62204]: INFO nova.compute.manager [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Took 8.43 seconds to spawn the instance on the hypervisor. [ 976.168580] env[62204]: DEBUG nova.compute.manager [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 976.169380] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 976.169380] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f6d2a2-0862-c61e-2746-817b6eeafb31" [ 976.169380] env[62204]: _type = "Task" [ 976.169380] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.170035] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aec88fd-2d64-4665-999e-405334914789 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.190548] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Preparing fetch location {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 976.190802] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Fetch image to [datastore1] OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c/OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c.vmdk {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 976.191159] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Downloading stream optimized image 64aeea2b-e127-4ab2-abff-027b5881ee9a to [datastore1] OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c/OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c.vmdk on the data store datastore1 as vApp {{(pid=62204) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 976.191244] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Downloading image file data 64aeea2b-e127-4ab2-abff-027b5881ee9a to the ESX as VM named 'OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c' {{(pid=62204) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 976.242601] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5af3a6-165b-45b4-8093-f9f8133cc905 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.251124] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed35a66c-bf02-46bc-b835-9581d208b6fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.284636] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0e971b-37b2-417f-b55f-eabd434695ce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.295549] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43301ef-2198-45e4-b673-e2749cf26327 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.311016] env[62204]: DEBUG nova.compute.provider_tree [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] 
Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.312891] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 976.312891] env[62204]: value = "resgroup-9" [ 976.312891] env[62204]: _type = "ResourcePool" [ 976.312891] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 976.313273] env[62204]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2fec8ae0-dcbe-4fc0-bfcb-da2d2b32c3f3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.334172] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lease: (returnval){ [ 976.334172] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 976.334172] env[62204]: _type = "HttpNfcLease" [ 976.334172] env[62204]: } obtained for vApp import into resource pool (val){ [ 976.334172] env[62204]: value = "resgroup-9" [ 976.334172] env[62204]: _type = "ResourcePool" [ 976.334172] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 976.334480] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the lease: (returnval){ [ 976.334480] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 976.334480] env[62204]: _type = "HttpNfcLease" [ 976.334480] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 976.343434] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 976.343434] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 976.343434] env[62204]: _type = "HttpNfcLease" [ 976.343434] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 976.365199] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200226, 'name': ReconfigVM_Task, 'duration_secs': 0.3652} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.368367] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Reconfigured VM instance instance-00000063 to attach disk [datastore1] e42444b3-51c9-4d0f-9eee-c6f2e6631997/e42444b3-51c9-4d0f-9eee-c6f2e6631997.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 976.369103] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02b7eeb4-d903-4c38-bce1-503c35c50a7a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.375727] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 976.375727] env[62204]: value = "task-1200229" [ 976.375727] env[62204]: _type = "Task" [ 976.375727] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.384828] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200229, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.409335] env[62204]: DEBUG nova.compute.manager [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Stashing vm_state: active {{(pid=62204) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 976.418661] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updated VIF entry in instance network info cache for port a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 976.419273] env[62204]: DEBUG nova.network.neutron [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance_info_cache with network_info: [{"id": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "address": "fa:16:3e:8d:31:3c", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7dcd5c1-45", "ovs_interfaceid": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.515499] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.529401] env[62204]: DEBUG nova.network.neutron [-] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.579187] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f2c71ef1-ed0b-495b-b2c8-20babff3ce8f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.588566] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e37850c-b28b-457f-8edf-56f47cb043f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.623486] env[62204]: DEBUG nova.compute.manager [req-643fe61d-5a2c-41df-a4a7-ca6db8d9ee66 req-667116e8-a4ec-4aed-9c82-e2dbfca3ae1f service nova] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Detach interface failed, port_id=ca6a6668-486c-47f3-bbb8-5902729c6304, reason: Instance ddef8de2-530e-4b94-aff1-6f7e410f44fb could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 976.696680] env[62204]: INFO nova.compute.manager [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Took 21.01 seconds to build instance. [ 976.829487] env[62204]: DEBUG nova.scheduler.client.report [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.843826] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 976.843826] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 976.843826] env[62204]: _type = "HttpNfcLease" [ 976.843826] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 976.885231] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200229, 'name': Rename_Task, 'duration_secs': 0.289229} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.885742] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 976.887274] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03870ddf-d3ab-48ee-9e67-d41c668bea24 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.895082] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 976.895082] env[62204]: value = "task-1200230" [ 976.895082] env[62204]: _type = "Task" [ 976.895082] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.904341] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200230, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.921891] env[62204]: DEBUG oslo_concurrency.lockutils [req-60526bc7-76c4-4941-85c1-424fdbce155e req-243eddb9-bca1-407a-a8f0-1253cb8a5c7d service nova] Releasing lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.929785] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.016235] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.033270] env[62204]: INFO nova.compute.manager [-] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Took 1.29 seconds to deallocate network for instance. [ 977.199497] env[62204]: DEBUG oslo_concurrency.lockutils [None req-488c3ccb-c859-4936-b66d-433432b1a408 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.526s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.339297] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.344020] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.607s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.344020] env[62204]: DEBUG nova.objects.instance [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lazy-loading 'resources' on Instance uuid d43dafa1-770f-4455-a3d8-9d08742b1fb6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.354287] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 977.354287] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 977.354287] env[62204]: _type = "HttpNfcLease" [ 977.354287] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 977.365356] env[62204]: INFO nova.scheduler.client.report [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted allocations for instance 0a383305-5b3b-4a7d-8834-d31e54eb4ba5 [ 977.409034] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200230, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.516027] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.540186] env[62204]: DEBUG oslo_concurrency.lockutils [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.843519] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.843818] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 977.844193] env[62204]: INFO nova.compute.manager [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Rebooting instance [ 977.849953] env[62204]: DEBUG nova.objects.instance [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lazy-loading 'numa_topology' on Instance uuid d43dafa1-770f-4455-a3d8-9d08742b1fb6 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.854734] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 977.854734] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 977.854734] env[62204]: _type = "HttpNfcLease" [ 977.854734] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 977.873229] env[62204]: DEBUG oslo_concurrency.lockutils [None req-67b99d47-e058-4725-bf19-fff77bfe1b0e tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0a383305-5b3b-4a7d-8834-d31e54eb4ba5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.353s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 977.906058] env[62204]: DEBUG oslo_vmware.api [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200230, 'name': PowerOnVM_Task, 'duration_secs': 1.003762} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.906424] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 977.906865] env[62204]: INFO nova.compute.manager [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Took 7.73 seconds to spawn the instance on the hypervisor. [ 977.906980] env[62204]: DEBUG nova.compute.manager [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 977.907863] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fea358-10aa-4a1c-9b4c-8534f5ce2c02 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.017037] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.353797] env[62204]: DEBUG nova.objects.base [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 978.355928] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.355928] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 978.355928] env[62204]: _type = "HttpNfcLease" [ 978.355928] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 978.371172] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "refresh_cache-aa336eda-d55a-4560-81bf-e4fcc6f4b485" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 978.371363] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquired lock "refresh_cache-aa336eda-d55a-4560-81bf-e4fcc6f4b485" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.371790] env[62204]: DEBUG nova.network.neutron [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 978.429814] env[62204]: INFO nova.compute.manager [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Took 17.34 seconds to build instance. [ 978.488307] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 978.488631] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 978.520936] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.598768] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd770daf-9499-4e84-b665-eba544a83dfa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.606956] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c9e41b-f137-4cdb-b509-75472afd0946 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.637961] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aecebb9-a142-4d93-b937-2262c668b21f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.645418] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04b0ef5-adfd-4cc0-9edf-a26de7e16dee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.659418] env[62204]: DEBUG nova.compute.provider_tree [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.853273] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.853273] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 978.853273] env[62204]: _type = "HttpNfcLease" [ 978.853273] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 978.932664] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4bba5cc-2a4a-4b1c-929f-d2468a53a0c4 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.849s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 978.992435] env[62204]: DEBUG nova.compute.manager [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 979.021081] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.162211] env[62204]: DEBUG nova.scheduler.client.report [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 979.354930] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.354930] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 979.354930] env[62204]: _type = "HttpNfcLease" [ 979.354930] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 979.389034] env[62204]: DEBUG nova.network.neutron [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Updating instance_info_cache with network_info: [{"id": "bde9750e-7c68-48e5-8cae-387ef9e45fdc", "address": "fa:16:3e:9f:69:8e", "network": {"id": "ab0d7024-e74f-4172-9333-410e94a41063", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-122713931-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4350e9a0bf0c45d3b37c8dc6bddbcfa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8459aaf-d6a8-46fb-ad14-464ac3104695", "external-id": "nsx-vlan-transportzone-46", "segmentation_id": 46, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbde9750e-7c", "ovs_interfaceid": "bde9750e-7c68-48e5-8cae-387ef9e45fdc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.514414] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.520231] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.542946] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "ade509d8-5d7c-4926-bb2f-067dce84f76c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.543161] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "ade509d8-5d7c-4926-bb2f-067dce84f76c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.668644] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.326s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.671185] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.340s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.671384] env[62204]: DEBUG nova.objects.instance [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62204) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 979.854268] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.854268] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 979.854268] env[62204]: _type = "HttpNfcLease" [ 979.854268] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 979.892876] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Releasing lock "refresh_cache-aa336eda-d55a-4560-81bf-e4fcc6f4b485" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 979.894455] env[62204]: DEBUG nova.compute.manager [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 979.895319] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a7ae16-8028-47eb-a74a-92e84aad7e18 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.945661] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 979.946111] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.020385] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.045520] env[62204]: DEBUG nova.compute.manager [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 980.183613] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d6b68a6e-f57f-4417-9e1a-40e1157e28fe tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 30.499s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.184410] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 8.357s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.184661] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.184880] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.185065] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.188747] env[62204]: INFO nova.compute.manager [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Terminating instance [ 980.191031] env[62204]: DEBUG nova.compute.manager [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 980.191145] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 980.191577] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-963c4632-806b-4788-a77d-9378637df58c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.205321] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d7a1ea-817c-417d-8d52-056ce68d8761 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.239395] env[62204]: WARNING nova.virt.vmwareapi.vmops [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d43dafa1-770f-4455-a3d8-9d08742b1fb6 could not be found. [ 980.239629] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 980.239833] env[62204]: INFO nova.compute.manager [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 980.240120] env[62204]: DEBUG oslo.service.loopingcall [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 980.240702] env[62204]: DEBUG nova.compute.manager [-] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 980.240807] env[62204]: DEBUG nova.network.neutron [-] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 980.352990] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 980.352990] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 980.352990] env[62204]: _type = "HttpNfcLease" [ 980.352990] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 980.456018] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.456297] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Starting heal instance info cache {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 980.524504] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.567623] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.682178] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e040c702-b58e-460e-ad10-12127d022d09 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 980.684238] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.754s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.704612] env[62204]: DEBUG nova.compute.manager [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 980.705001] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d150ebc-aa6c-48c9-babf-488590a3b1fa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.853428] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 980.853428] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 980.853428] env[62204]: _type = "HttpNfcLease" [ 980.853428] env[62204]: } is ready. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 980.853739] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 980.853739] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52eb8c7a-0107-a215-9907-feb92bb73ed6" [ 980.853739] env[62204]: _type = "HttpNfcLease" [ 980.853739] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 980.854464] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908fac5c-fb2a-44e8-9f30-0c6041e08f3f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.861616] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52db1536-8850-d0d4-ec50-1bd1726ca3c1/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 980.861797] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52db1536-8850-d0d4-ec50-1bd1726ca3c1/disk-0.vmdk. {{(pid=62204) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 980.919185] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0d35cc-327d-4993-a236-6bc36765f7f1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.929994] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2aefbec5-9cc2-4f55-9c76-d8d03dddd5a9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.931407] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Doing hard reboot of VM {{(pid=62204) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 980.932682] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-494940d5-8e84-496b-9368-528fa5ba1895 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.941524] env[62204]: DEBUG oslo_vmware.api [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 980.941524] env[62204]: value = "task-1200231" [ 980.941524] env[62204]: _type = "Task" [ 980.941524] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.949205] env[62204]: DEBUG oslo_vmware.api [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200231, 'name': ResetVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.961017] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Didn't find any instances for network info cache update. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 980.961282] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.961538] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.961746] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.961943] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.962153] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.962307] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.962443] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62204) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 980.962591] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.020409] env[62204]: DEBUG nova.network.neutron [-] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.021768] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.190895] env[62204]: INFO nova.compute.claims [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 981.216723] env[62204]: INFO nova.compute.manager [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] instance snapshotting [ 981.220043] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce623494-12dd-4f80-ac12-8900ed392e36 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.248490] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4e6bf4-c582-4bc9-b193-c522d08b6649 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.453865] env[62204]: DEBUG oslo_vmware.api [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200231, 'name': ResetVM_Task, 'duration_secs': 0.100312} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.455608] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Did hard reboot of VM {{(pid=62204) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 981.455806] env[62204]: DEBUG nova.compute.manager [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 981.456709] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1748f030-fe1c-4980-a583-9b09912e1b68 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.466938] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.523714] env[62204]: INFO nova.compute.manager [-] [instance: d43dafa1-770f-4455-a3d8-9d08742b1fb6] Took 1.28 seconds to deallocate network for instance. [ 981.524059] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.698626] env[62204]: INFO nova.compute.resource_tracker [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating resource usage from migration aa2b8ed9-0c99-4ff5-adda-44f43b2f020e [ 981.763995] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 981.764662] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1dd169d7-b90e-467d-94f5-0459dc655a1e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.773406] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 981.773406] env[62204]: value = "task-1200232" [ 981.773406] env[62204]: _type = "Task" [ 981.773406] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.786889] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200232, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.974500] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14606355-4f70-4219-9742-653ebcbb5d68 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.978246] env[62204]: DEBUG oslo_concurrency.lockutils [None req-57dcb964-5c52-4fa6-9baa-69bf3aea1641 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.134s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.984533] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3640bf-4e71-4413-9804-b171c6114768 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.024114] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39ac1d1-c1f9-40de-88f0-7d679de83026 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.035713] env[62204]: DEBUG oslo_vmware.api [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200227, 'name': ReconfigVM_Task, 'duration_secs': 5.751781} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.040994] env[62204]: DEBUG oslo_concurrency.lockutils [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.041177] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Reconfigured VM to detach interface {{(pid=62204) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 982.044860] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff10f4c-ee11-4e94-84bd-5a57f4b77b48 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.065719] env[62204]: DEBUG nova.compute.provider_tree [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.223369] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Completed reading data from the image iterator. {{(pid=62204) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 982.223697] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52db1536-8850-d0d4-ec50-1bd1726ca3c1/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 982.225148] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77d2611-c602-4f1a-9e3e-0a2efd45f2eb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.234232] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52db1536-8850-d0d4-ec50-1bd1726ca3c1/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 982.234562] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52db1536-8850-d0d4-ec50-1bd1726ca3c1/disk-0.vmdk. 
{{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 982.234918] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-48f1f014-2ccf-4ec6-acd5-342bfd11ad43 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.287981] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200232, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.535243] env[62204]: DEBUG oslo_vmware.rw_handles [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52db1536-8850-d0d4-ec50-1bd1726ca3c1/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 982.535509] env[62204]: INFO nova.virt.vmwareapi.images [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Downloaded image file data 64aeea2b-e127-4ab2-abff-027b5881ee9a [ 982.536481] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89097784-f8b0-479b-9138-8f3e350011e5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.554919] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f6e6e15-2540-4c5d-be15-accb7ee41603 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.559788] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b525304a-3100-432a-856e-95994a2aeb31 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "d43dafa1-770f-4455-a3d8-9d08742b1fb6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.375s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.572068] env[62204]: DEBUG nova.scheduler.client.report [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.607390] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.607818] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.608129] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 982.608382] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 982.608561] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 982.611816] env[62204]: INFO nova.virt.vmwareapi.images [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] The imported VM was unregistered [ 982.613988] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Caching image {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 982.613988] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Creating directory with path [datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.615021] env[62204]: INFO nova.compute.manager [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Terminating instance [ 982.615995] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c419050-283d-4efc-a6f3-63a9af8e7762 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.621015] env[62204]: DEBUG nova.compute.manager [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 982.621015] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 982.621015] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba55eec-c006-457c-b7c3-c98ff8328713 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.629310] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 982.629605] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aaeff934-7c13-43f6-9ed5-fa3533ccc51a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.636230] env[62204]: DEBUG oslo_vmware.api [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 982.636230] env[62204]: value = "task-1200234" [ 982.636230] env[62204]: _type = "Task" [ 982.636230] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.642329] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Created directory with path [datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 982.642846] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c/OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c.vmdk to [datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a/64aeea2b-e127-4ab2-abff-027b5881ee9a.vmdk. 
{{(pid=62204) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 982.646136] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-2cdf660e-6db9-43c3-a61e-4c46be56dbc7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.648235] env[62204]: DEBUG oslo_vmware.api [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200234, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.652801] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 982.652801] env[62204]: value = "task-1200235" [ 982.652801] env[62204]: _type = "Task" [ 982.652801] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.661076] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200235, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.787200] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200232, 'name': CreateSnapshot_Task, 'duration_secs': 0.817917} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.787502] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 982.788316] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb26598-e292-45d4-b7fc-dd717d195fe2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.078592] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.394s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.078592] env[62204]: INFO nova.compute.manager [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Migrating [ 983.086027] env[62204]: DEBUG oslo_concurrency.lockutils [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.546s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.086271] env[62204]: DEBUG nova.objects.instance [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'resources' on Instance uuid ddef8de2-530e-4b94-aff1-6f7e410f44fb {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.146016] env[62204]: DEBUG oslo_vmware.api [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200234, 'name': PowerOffVM_Task, 'duration_secs': 0.364147} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.146316] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 983.146482] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.146793] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-149a35b1-cd51-4782-a48e-3a7e47a7dcb5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.162467] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200235, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.229944] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.230227] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.230421] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Deleting the datastore file [datastore1] aa336eda-d55a-4560-81bf-e4fcc6f4b485 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.230698] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93bbc0b0-7ea2-41fb-ac43-2831cd0c462d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.238198] env[62204]: DEBUG oslo_vmware.api [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for the task: (returnval){ [ 983.238198] env[62204]: value = "task-1200237" [ 983.238198] env[62204]: _type = "Task" [ 983.238198] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.250028] env[62204]: DEBUG oslo_vmware.api [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.262183] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.262482] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.262680] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.262871] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.263060] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.265355] env[62204]: INFO nova.compute.manager [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Terminating instance [ 983.267356] env[62204]: DEBUG nova.compute.manager [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 983.267555] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.268432] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d13c8c-d38e-4edc-8077-df6369aebeca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.279087] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.279409] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35e8fade-bf70-46f1-aba4-389557c059a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.288354] env[62204]: DEBUG oslo_vmware.api [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 983.288354] env[62204]: value = "task-1200238" [ 983.288354] env[62204]: _type = "Task" [ 983.288354] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.299661] env[62204]: DEBUG oslo_vmware.api [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200238, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.308226] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 983.308810] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-09317622-1264-44fc-922b-845631516ace {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.320606] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 983.320606] env[62204]: value = "task-1200239" [ 983.320606] env[62204]: _type = "Task" [ 983.320606] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.334072] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.599847] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.600226] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.600511] env[62204]: DEBUG nova.network.neutron [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 983.670643] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200235, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.683193] env[62204]: DEBUG oslo_concurrency.lockutils [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.683193] env[62204]: DEBUG oslo_concurrency.lockutils [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquired lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.683371] env[62204]: DEBUG nova.network.neutron [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 983.753818] env[62204]: DEBUG oslo_vmware.api [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.804892] env[62204]: DEBUG oslo_vmware.api [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200238, 'name': PowerOffVM_Task, 'duration_secs': 0.305673} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.805467] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 983.806768] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.806768] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a38e62f-1263-4aa6-a562-5c43040fcdfd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.830662] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 93%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.878991] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.879738] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.879738] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleting the datastore file [datastore1] 9cf3ca28-443f-4e06-9f04-103b5b6cddd4 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.879738] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-527463d0-d0f3-4efc-9661-d8a745e23e51 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.894282] env[62204]: DEBUG oslo_vmware.api [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 983.894282] env[62204]: value = "task-1200241" [ 983.894282] env[62204]: _type = "Task" [ 983.894282] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.896759] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3208ee6d-f0a9-4dae-a652-2e4e719d9339 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.909023] env[62204]: DEBUG oslo_vmware.api [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200241, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.914092] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a042a56b-a8f5-4f15-8a8f-fd10c982a290 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.951365] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda9c1bd-8772-47fd-a464-3da4f2d34e74 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.963468] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd12616-0cbd-46dd-a0a0-98719ccd5e03 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.981091] env[62204]: DEBUG nova.compute.provider_tree [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.014033] env[62204]: DEBUG oslo_concurrency.lockutils [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "7671c77f-3da8-4a41-a472-138c7bd23a92" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.014343] env[62204]: DEBUG oslo_concurrency.lockutils [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "7671c77f-3da8-4a41-a472-138c7bd23a92" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.014557] env[62204]: DEBUG oslo_concurrency.lockutils [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "7671c77f-3da8-4a41-a472-138c7bd23a92-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.014748] env[62204]: DEBUG oslo_concurrency.lockutils [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock 
"7671c77f-3da8-4a41-a472-138c7bd23a92-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.014920] env[62204]: DEBUG oslo_concurrency.lockutils [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "7671c77f-3da8-4a41-a472-138c7bd23a92-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.017439] env[62204]: INFO nova.compute.manager [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Terminating instance [ 984.019630] env[62204]: DEBUG nova.compute.manager [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 984.019838] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 984.020684] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ec08fa-e112-4dde-8fa3-d4f2dd2358fc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.033323] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 984.033480] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8762887-d95c-44c5-abd2-7e24017646d5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.045292] env[62204]: DEBUG oslo_vmware.api [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 984.045292] env[62204]: value = "task-1200242" [ 984.045292] env[62204]: _type = "Task" [ 984.045292] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.058052] env[62204]: DEBUG oslo_vmware.api [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200242, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.110695] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 984.111047] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.168865] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200235, 'name': MoveVirtualDisk_Task} progress is 49%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.256910] env[62204]: DEBUG oslo_vmware.api [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.334197] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.408593] env[62204]: DEBUG oslo_vmware.api [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.442057] env[62204]: DEBUG nova.network.neutron [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance_info_cache with network_info: [{"id": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "address": "fa:16:3e:27:10:6c", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba57ac2e-ab", "ovs_interfaceid": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.484891] env[62204]: DEBUG nova.scheduler.client.report [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 984.558725] env[62204]: DEBUG oslo_vmware.api [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200242, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.559900] env[62204]: INFO nova.network.neutron [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Port 5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 984.560291] env[62204]: DEBUG nova.network.neutron [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updating instance_info_cache with network_info: [{"id": "4243893b-2fda-4a71-94f3-332643bceb52", "address": "fa:16:3e:5e:e9:38", "network": {"id": "3b20dcf5-a239-493a-bd84-9815cabea48a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-487595589-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "81dc15a8604e4900845b79c75cc5ef16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4243893b-2f", "ovs_interfaceid": "4243893b-2fda-4a71-94f3-332643bceb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.613959] env[62204]: DEBUG nova.compute.manager [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 984.670804] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200235, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.753917] env[62204]: DEBUG oslo_vmware.api [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200237, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.836699] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.911781] env[62204]: DEBUG oslo_vmware.api [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.945489] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.992440] env[62204]: DEBUG oslo_concurrency.lockutils [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 984.995334] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.481s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 984.997208] env[62204]: INFO nova.compute.claims [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.018563] env[62204]: INFO nova.scheduler.client.report [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted allocations for instance ddef8de2-530e-4b94-aff1-6f7e410f44fb [ 985.059243] env[62204]: DEBUG oslo_vmware.api [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200242, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.063128] env[62204]: DEBUG oslo_concurrency.lockutils [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Releasing lock "refresh_cache-7671c77f-3da8-4a41-a472-138c7bd23a92" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.137464] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.170914] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200235, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.255260] env[62204]: DEBUG oslo_vmware.api [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Task: {'id': task-1200237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.798005} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.256020] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.256020] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.256020] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.256183] env[62204]: INFO nova.compute.manager [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Took 2.64 seconds to destroy the instance on the hypervisor. [ 985.256338] env[62204]: DEBUG oslo.service.loopingcall [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.256576] env[62204]: DEBUG nova.compute.manager [-] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 985.256683] env[62204]: DEBUG nova.network.neutron [-] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 985.336884] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.407114] env[62204]: DEBUG oslo_vmware.api [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.524227] env[62204]: DEBUG nova.compute.manager [req-4aa6e7a1-c293-4aa6-9cf2-85c01dde3106 req-2507bb5f-55c0-4c2f-9f55-1f5a2f309898 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Received event network-vif-deleted-bde9750e-7c68-48e5-8cae-387ef9e45fdc {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 985.524630] env[62204]: INFO nova.compute.manager [req-4aa6e7a1-c293-4aa6-9cf2-85c01dde3106 req-2507bb5f-55c0-4c2f-9f55-1f5a2f309898 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Neutron deleted interface bde9750e-7c68-48e5-8cae-387ef9e45fdc; detaching it from the instance and deleting it from the info cache [ 985.524630] env[62204]: DEBUG nova.network.neutron [req-4aa6e7a1-c293-4aa6-9cf2-85c01dde3106 req-2507bb5f-55c0-4c2f-9f55-1f5a2f309898 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.528686] env[62204]: DEBUG oslo_concurrency.lockutils [None req-12c2db6b-939c-4316-acd2-ee5bd93aa7c0 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "ddef8de2-530e-4b94-aff1-6f7e410f44fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.942s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.557031] env[62204]: DEBUG oslo_vmware.api [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200242, 'name': PowerOffVM_Task, 'duration_secs': 1.13752} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.557031] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 985.557031] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 985.557031] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2fbc04ae-7f6f-45ab-9871-d9fc7de2291c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.567757] env[62204]: DEBUG oslo_concurrency.lockutils [None req-393b4ff3-74d7-4542-ae0b-5faef69df84b tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "interface-7671c77f-3da8-4a41-a472-138c7bd23a92-5c5577ec-23eb-4ba7-b9ca-ed5fe89b9e38" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.150s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.645087] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 985.645327] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 985.645517] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleting the datastore file [datastore1] 7671c77f-3da8-4a41-a472-138c7bd23a92 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.645787] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-461d698f-8d17-44ca-b896-91ade011f78b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.654423] env[62204]: DEBUG oslo_vmware.api [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 985.654423] env[62204]: value = "task-1200244" [ 985.654423] env[62204]: _type = "Task" [ 985.654423] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.665483] env[62204]: DEBUG oslo_vmware.api [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.668625] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200235, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.002481} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.668861] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c/OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c.vmdk to [datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a/64aeea2b-e127-4ab2-abff-027b5881ee9a.vmdk. [ 985.669060] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Cleaning up location [datastore1] OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 985.669233] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_9c8011f0-8bcf-42be-9b6e-cee2595c749c {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 985.669560] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb0185e6-86aa-4c35-bb4b-15268e4afae0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.675373] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 985.675373] env[62204]: value = "task-1200245" [ 985.675373] env[62204]: _type = "Task" [ 985.675373] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.682736] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200245, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.834200] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.908157] env[62204]: DEBUG oslo_vmware.api [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.683463} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.908443] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.908635] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.908844] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.908997] env[62204]: INFO nova.compute.manager [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Took 2.64 seconds to destroy the instance on the hypervisor. [ 985.909272] env[62204]: DEBUG oslo.service.loopingcall [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 985.909475] env[62204]: DEBUG nova.compute.manager [-] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 985.909568] env[62204]: DEBUG nova.network.neutron [-] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 985.993051] env[62204]: DEBUG nova.network.neutron [-] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.031039] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-267a0c4e-b657-4653-bd59-d10208f1cdc8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.040210] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178f9649-f517-4c78-9a24-455b2e0213de {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.081813] env[62204]: DEBUG nova.compute.manager [req-4aa6e7a1-c293-4aa6-9cf2-85c01dde3106 req-2507bb5f-55c0-4c2f-9f55-1f5a2f309898 service nova] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Detach interface failed, port_id=bde9750e-7c68-48e5-8cae-387ef9e45fdc, reason: Instance aa336eda-d55a-4560-81bf-e4fcc6f4b485 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 986.164808] env[62204]: DEBUG oslo_vmware.api [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.184698] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200245, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170939} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.187293] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.187475] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a/64aeea2b-e127-4ab2-abff-027b5881ee9a.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.187768] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a/64aeea2b-e127-4ab2-abff-027b5881ee9a.vmdk to [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56/c0990e53-70c9-4536-b26a-bc00bd457c56.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.188435] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-349a0a38-82f6-4390-ba0b-9a2a1e4f3189 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.195419] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 986.195419] env[62204]: value = "task-1200246" [ 986.195419] env[62204]: _type = "Task" [ 986.195419] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.204382] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200246, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.330085] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1290c5f-ad9c-4be7-9f2b-b70de658ba3b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.338489] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.341164] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7d924a-a3d6-4220-b040-c0032a798480 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.374787] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de70a4a-2719-4b1e-ac43-7d3400217c70 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.382233] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbceb74-d761-486a-8815-8f2f5e272fdf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.396328] env[62204]: DEBUG nova.compute.provider_tree [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.468775] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccb849e-39ad-4805-a6d2-88ca7d2cbca2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.492798] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance '2178b629-4be6-473b-9a75-19efa234d442' progress to 0 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 986.497017] env[62204]: INFO nova.compute.manager [-] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Took 1.24 seconds to deallocate network for instance. [ 986.670674] env[62204]: DEBUG oslo_vmware.api [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.712575} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.671082] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 986.671290] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 986.671489] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 986.671662] env[62204]: INFO nova.compute.manager [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Took 2.65 seconds to destroy the instance on the hypervisor. [ 986.671920] env[62204]: DEBUG oslo.service.loopingcall [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.672139] env[62204]: DEBUG nova.compute.manager [-] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 986.672231] env[62204]: DEBUG nova.network.neutron [-] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 986.712065] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200246, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.841260] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.899763] env[62204]: DEBUG nova.scheduler.client.report [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 987.000066] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 987.000432] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78f4db43-9978-464e-9fe3-622495cde2bc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.003576] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 987.013273] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 987.013273] env[62204]: value = "task-1200247" [ 987.013273] env[62204]: _type = "Task" [ 987.013273] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.026755] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200247, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.186873] env[62204]: DEBUG nova.network.neutron [-] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.212383] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200246, 'name': CopyVirtualDisk_Task} progress is 38%. 
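The scheduler report client entry above compares the logged inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 against what placement already has. The capacity placement can actually allocate per resource class follows from total, reserved and allocation_ratio. A minimal sketch in plain Python using the values copied from that log entry (the helper name effective_capacity is ours, not a Nova or placement function; min_unit/max_unit/step_size are omitted because they constrain allocation granularity, not capacity):

# Inventory values as logged for provider 92e8f362-5134-40c6-9a5c-0b8f64197972.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # Capacity placement can allocate: (total - reserved) * allocation_ratio.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}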
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.340657] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.408986] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.409548] env[62204]: DEBUG nova.compute.manager [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 987.412885] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.845s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 987.414715] env[62204]: INFO nova.compute.claims [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.528291] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200247, 'name': PowerOffVM_Task, 'duration_secs': 0.275776} completed successfully. 
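The repeated "Task: {...} progress is N%" entries come from oslo_vmware's wait_for_task polling vCenter tasks (here CopyVirtualDisk_Task, CloneVM_Task and PowerOffVM_Task) between PropertyCollector reads until each reaches a terminal state, at which point the duration_secs is logged. A minimal sketch of that poll-until-done pattern, under stated assumptions: get_task_info is a hypothetical stand-in for the property-collector read, and the poll interval is illustrative, not the deployment's configured value:

import time

def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    # get_task_info(task_id) is a hypothetical callable returning an object
    # with .state ('running' | 'success' | 'error'), .progress and .error,
    # standing in for the PropertyCollector read oslo.vmware performs.
    while True:
        info = get_task_info(task_id)
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError('Task %s failed: %s' % (task_id, info.error))
        print('Task %s progress is %s%%' % (task_id, info.progress))
        time.sleep(poll_interval)  # interval is illustrative only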
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.528775] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 987.529264] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance '2178b629-4be6-473b-9a75-19efa234d442' progress to 17 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 987.586566] env[62204]: DEBUG nova.compute.manager [req-43841d16-d7b8-4813-8677-5855250f954d req-6548f408-6745-45a0-ae80-3ea5b37de723 service nova] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Received event network-vif-deleted-444f8f64-f9a6-4e48-ba55-fe1b6be68af2 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 987.694375] env[62204]: INFO nova.compute.manager [-] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Took 1.78 seconds to deallocate network for instance. [ 987.718625] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200246, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.848771] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task} progress is 95%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.919930] env[62204]: DEBUG nova.compute.utils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 987.925948] env[62204]: DEBUG nova.compute.manager [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 987.926156] env[62204]: DEBUG nova.network.neutron [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 987.997800] env[62204]: DEBUG nova.compute.manager [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Stashing vm_state: active {{(pid=62204) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 988.037308] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 988.037865] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 988.038328] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.038619] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 988.038925] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.039156] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 988.039449] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 988.039701] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 988.039939] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 988.040208] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 988.040449] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 988.049422] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bbb3632-c785-45f9-a54c-b4b15cbbeaca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.075445] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 988.075445] env[62204]: value = "task-1200248" [ 988.075445] env[62204]: _type = "Task" [ 988.075445] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.091565] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200248, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.209355] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.217532] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200246, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.219876] env[62204]: DEBUG nova.policy [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '817e2eaba86149789ff8d3ff69f15489', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8108a8f6b5e04832aab188333bad1e0e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 988.346513] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200239, 'name': CloneVM_Task, 'duration_secs': 4.928124} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.346513] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Created linked-clone VM from snapshot [ 988.346513] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be1b0ba7-5055-4989-b3d1-77b4eec0001c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.361442] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Uploading image a8253c09-78b5-4570-80ae-c608a8498f80 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 988.378458] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 988.378791] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-62429ad9-20e4-4665-9ba8-4ce137914330 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.391030] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 988.391030] env[62204]: value = "task-1200249" [ 988.391030] env[62204]: _type = "Task" [ 988.391030] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.404683] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200249, 'name': Destroy_Task} progress is 0%. 
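The nova.policy entry above shows the 'network:attach_external_network' check failing for a request context whose roles are only ['reader', 'member']. Roughly the same decision can be reproduced standalone with oslo.policy; this is a sketch under assumptions, in particular the admin-only check string is illustrative rather than Nova's registered default:

from oslo_config import cfg
from oslo_policy import policy

conf = cfg.ConfigOpts()
conf([])  # no CLI arguments or config files; rely on defaults
enforcer = policy.Enforcer(conf)
enforcer.register_defaults([
    # Assumed check string: treat the rule as admin-only for illustration.
    policy.RuleDefault('network:attach_external_network', 'is_admin:True'),
])

# Credentials shaped like the request context logged above.
creds = {'user_id': '817e2eaba86149789ff8d3ff69f15489',
         'project_id': '8108a8f6b5e04832aab188333bad1e0e',
         'roles': ['reader', 'member'],
         'is_admin': False}

print(enforcer.enforce('network:attach_external_network', {}, creds))  # False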
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.423679] env[62204]: DEBUG nova.compute.manager [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 988.519033] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.590490] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200248, 'name': ReconfigVM_Task, 'duration_secs': 0.302352} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.594065] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance '2178b629-4be6-473b-9a75-19efa234d442' progress to 33 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 988.715191] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200246, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.777969] env[62204]: DEBUG nova.network.neutron [-] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.809668] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff0db69-072b-487d-a774-d1a3a6d5d858 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.817724] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509fa975-ea20-4642-92da-9fa645e9a254 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.859020] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17645987-1e99-4749-b88f-4fc065a7664f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.863332] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7bf9a1-c061-4815-94fa-c3347fbac591 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.881667] env[62204]: DEBUG nova.compute.provider_tree [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.883701] env[62204]: DEBUG nova.network.neutron [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Successfully created port: 59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.903949] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200249, 'name': Destroy_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.992404] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.992705] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.099906] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 989.100055] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 989.100329] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.100435] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 989.100545] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.100702] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 989.101076] env[62204]: DEBUG 
nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 989.101145] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 989.101295] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 989.101459] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 989.101637] env[62204]: DEBUG nova.virt.hardware [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 989.107020] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Reconfiguring VM instance instance-00000061 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 989.107358] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e6cb0ff-4683-4b87-8008-a366d08248ad {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.126769] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 989.126769] env[62204]: value = "task-1200250" [ 989.126769] env[62204]: _type = "Task" [ 989.126769] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.136259] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200250, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.212127] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200246, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.675171} completed successfully. 
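The nova.virt.hardware entries above trace CPU-topology selection for the 1-vCPU m1.micro and m1.nano flavors: with no flavor or image preferences the limits default to 65536 sockets/cores/threads, and the only topology that fits is 1 socket x 1 core x 1 thread. A simplified illustration of that enumeration, not the actual nova.virt.hardware code (preferences and NUMA constraints are ignored):

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate sockets*cores*threads combinations that exactly fit the vCPU count.
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

print(possible_topologies(1))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)], the single possible
#    topology reported above for the 1-vCPU flavors.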
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.212445] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/64aeea2b-e127-4ab2-abff-027b5881ee9a/64aeea2b-e127-4ab2-abff-027b5881ee9a.vmdk to [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56/c0990e53-70c9-4536-b26a-bc00bd457c56.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.213267] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a6409b-7ed6-4b33-be83-b039d13fa810 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.245798] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56/c0990e53-70c9-4536-b26a-bc00bd457c56.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.246276] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91922e30-5939-4c26-be89-bfa4cfb6f9d6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.277143] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 989.277143] env[62204]: value = "task-1200251" [ 989.277143] env[62204]: _type = "Task" [ 989.277143] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.289370] env[62204]: INFO nova.compute.manager [-] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Took 2.62 seconds to deallocate network for instance. [ 989.289849] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200251, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.387264] env[62204]: DEBUG nova.scheduler.client.report [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 989.405297] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200249, 'name': Destroy_Task} progress is 33%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.435896] env[62204]: DEBUG nova.compute.manager [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 989.465085] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 989.465085] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 989.465085] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.465085] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Flavor pref 0:0:0 {{(pid=62204) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 989.465586] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.465951] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 989.466928] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 989.466928] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 989.466928] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 989.467099] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 989.467895] env[62204]: DEBUG nova.virt.hardware [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 989.468349] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9eae3f-1f73-49e4-8715-95102db6f9fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.476356] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46357211-ee0e-4ae7-aa38-1920a03af792 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.496195] env[62204]: DEBUG nova.compute.utils [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 989.636711] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 
tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200250, 'name': ReconfigVM_Task, 'duration_secs': 0.358326} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.636984] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Reconfigured VM instance instance-00000061 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 989.637811] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633aeb71-1c81-4e7c-bc21-0599210b9f60 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.659809] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 2178b629-4be6-473b-9a75-19efa234d442/2178b629-4be6-473b-9a75-19efa234d442.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.660124] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-650d9b73-f74f-4777-a51f-a645286ac818 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.677933] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 989.677933] env[62204]: value = "task-1200252" [ 989.677933] env[62204]: _type = "Task" [ 989.677933] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.685749] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200252, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.712252] env[62204]: DEBUG nova.compute.manager [req-44b9283d-7739-4d0b-ab10-42482234daa3 req-e97f502b-3c22-493c-becb-72b16840178d service nova] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Received event network-vif-deleted-4243893b-2fda-4a71-94f3-332643bceb52 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 989.791311] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200251, 'name': ReconfigVM_Task, 'duration_secs': 0.414059} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.791672] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Reconfigured VM instance instance-0000003b to attach disk [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56/c0990e53-70c9-4536-b26a-bc00bd457c56.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.792857] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3cd7e562-49e4-47ef-8fdf-63f7a86f4649 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.799864] env[62204]: DEBUG oslo_concurrency.lockutils [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.800252] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 989.800252] env[62204]: value = "task-1200253" [ 989.800252] env[62204]: _type = "Task" [ 989.800252] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.807917] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200253, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.893074] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.893641] env[62204]: DEBUG nova.compute.manager [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 989.897449] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.430s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.897449] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.897449] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62204) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 989.897449] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.760s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.899260] env[62204]: INFO nova.compute.claims [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 989.905209] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e476d53d-2f50-44f7-b704-60ffef18016c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.916755] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc45bdf-e63c-486d-a496-e63ab77dde31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.921139] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200249, 'name': Destroy_Task, 'duration_secs': 1.175816} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.921139] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Destroyed the VM [ 989.921139] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 989.921743] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-042e7f4d-bb4d-4d11-af1e-82ab0fe1f9b7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.932405] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56371056-8706-4771-84f8-8814a21e35c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.938088] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 989.938088] env[62204]: value = "task-1200254" [ 989.938088] env[62204]: _type = "Task" [ 989.938088] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.944252] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f338bd-66b0-422e-b067-e309b876c623 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.951201] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200254, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.979770] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178864MB free_disk=155GB free_vcpus=48 pci_devices=None {{(pid=62204) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 989.979929] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.999284] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.191767] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200252, 'name': ReconfigVM_Task, 'duration_secs': 0.300571} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.192029] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 2178b629-4be6-473b-9a75-19efa234d442/2178b629-4be6-473b-9a75-19efa234d442.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.192354] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance '2178b629-4be6-473b-9a75-19efa234d442' progress to 50 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 990.311626] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200253, 'name': Rename_Task, 'duration_secs': 0.149049} completed successfully. 
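The lockutils entries throughout this stretch ('Acquiring lock "compute_resources" ...', 'acquired ... waited 4.760s', 'released ... held 1.006s') are oslo.concurrency's named locks serializing resource-tracker claims and block-device reservations. A minimal sketch of the same pattern (the lock name matches the log, but the functions are illustrative, not Nova's):

from oslo_concurrency import lockutils

def update_usage_example():
    # Context-manager form: the body runs while holding the named lock.
    with lockutils.lock('compute_resources'):
        pass  # mutate shared resource-tracker state here

@lockutils.synchronized('compute_resources')
def instance_claim_example():
    # Decorator form: equivalent to wrapping the whole body in the lock.
    pass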
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.312894] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.313930] env[62204]: DEBUG nova.compute.manager [req-0f8f0318-f26b-4f6c-a3db-ac24093826b9 req-c811e6df-c219-41bc-bf53-5647dfa27895 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received event network-vif-plugged-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 990.314145] env[62204]: DEBUG oslo_concurrency.lockutils [req-0f8f0318-f26b-4f6c-a3db-ac24093826b9 req-c811e6df-c219-41bc-bf53-5647dfa27895 service nova] Acquiring lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.314485] env[62204]: DEBUG oslo_concurrency.lockutils [req-0f8f0318-f26b-4f6c-a3db-ac24093826b9 req-c811e6df-c219-41bc-bf53-5647dfa27895 service nova] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.314659] env[62204]: DEBUG oslo_concurrency.lockutils [req-0f8f0318-f26b-4f6c-a3db-ac24093826b9 req-c811e6df-c219-41bc-bf53-5647dfa27895 service nova] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.314827] env[62204]: DEBUG nova.compute.manager [req-0f8f0318-f26b-4f6c-a3db-ac24093826b9 req-c811e6df-c219-41bc-bf53-5647dfa27895 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] No waiting events found dispatching network-vif-plugged-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 990.315032] env[62204]: WARNING nova.compute.manager [req-0f8f0318-f26b-4f6c-a3db-ac24093826b9 req-c811e6df-c219-41bc-bf53-5647dfa27895 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received unexpected event network-vif-plugged-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 for instance with vm_state building and task_state spawning. [ 990.315308] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-855df782-28f0-4d6f-b47d-e10739dbec2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.321902] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 990.321902] env[62204]: value = "task-1200255" [ 990.321902] env[62204]: _type = "Task" [ 990.321902] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.330071] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200255, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.405315] env[62204]: DEBUG nova.compute.utils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 990.405315] env[62204]: DEBUG nova.compute.manager [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 990.406189] env[62204]: DEBUG nova.network.neutron [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 990.449211] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200254, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.470497] env[62204]: DEBUG nova.policy [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f57a0e000a79440489a0009f1b2390e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cc2d3674b2a4fa3806dc0286481368e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 990.700516] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2c4d27-4a01-422a-acdb-532df7acaac5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.724441] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff437c8a-4abd-4aad-8037-17c5eb29b5f4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.742615] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance '2178b629-4be6-473b-9a75-19efa234d442' progress to 67 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 990.832736] env[62204]: DEBUG oslo_vmware.api 
[None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200255, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.888695] env[62204]: DEBUG nova.network.neutron [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Successfully updated port: 59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.908939] env[62204]: DEBUG nova.compute.manager [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 990.916515] env[62204]: DEBUG nova.network.neutron [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Successfully created port: aa247f33-6bdf-4e2e-b1aa-fbd26891eff4 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 990.960611] env[62204]: DEBUG oslo_vmware.api [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200254, 'name': RemoveSnapshot_Task, 'duration_secs': 0.913248} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.961765] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 991.073520] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.073773] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.073999] env[62204]: INFO nova.compute.manager [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Attaching volume e7cba0e4-1e22-4040-be6e-c078b7175758 to /dev/sdb [ 991.119842] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476d4c11-a690-41f1-82ed-de97b239d8e4 
{{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.131618] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b09b92-2ff7-4bf5-a0da-5f6e6505ca16 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.149636] env[62204]: DEBUG nova.virt.block_device [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Updating existing volume attachment record: e0a16e0f-fb13-47dd-a7df-7f6f269775e1 {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 991.235922] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f03e51e-8522-4e59-a31e-342906d8e6db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.244360] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0b0397-313b-45a9-96bd-3d5955ec8347 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.276419] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae2fd6d-32a7-4af5-97eb-a69260edcdcf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.283591] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41765e23-cd76-4138-8e44-0af6aea7ea9a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.296382] env[62204]: DEBUG nova.compute.provider_tree [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.300633] env[62204]: DEBUG nova.network.neutron [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Port ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c binding to destination host cpu-1 is already ACTIVE {{(pid=62204) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 991.333234] env[62204]: DEBUG oslo_vmware.api [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200255, 'name': PowerOnVM_Task, 'duration_secs': 0.676808} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.333548] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.394366] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.395851] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.395851] env[62204]: DEBUG nova.network.neutron [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 991.440286] env[62204]: DEBUG nova.compute.manager [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 991.441638] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9542252f-8338-4d1c-9e63-e14d6be54721 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.467246] env[62204]: WARNING nova.compute.manager [None req-af58be11-a4cf-46d8-93cb-a79be2d3c055 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Image not found during snapshot: nova.exception.ImageNotFound: Image a8253c09-78b5-4570-80ae-c608a8498f80 could not be found. 
[ 991.799518] env[62204]: DEBUG nova.scheduler.client.report [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.850964] env[62204]: DEBUG nova.compute.manager [req-e9daa6cf-5bbd-4621-a52c-bcc22dcaa24d req-e6e9e97a-3b69-4137-a17a-e572a50910b3 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received event network-changed-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 991.851198] env[62204]: DEBUG nova.compute.manager [req-e9daa6cf-5bbd-4621-a52c-bcc22dcaa24d req-e6e9e97a-3b69-4137-a17a-e572a50910b3 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Refreshing instance network info cache due to event network-changed-59c7be21-51f9-4357-a2e4-24ec0bf0ed20. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 991.851399] env[62204]: DEBUG oslo_concurrency.lockutils [req-e9daa6cf-5bbd-4621-a52c-bcc22dcaa24d req-e6e9e97a-3b69-4137-a17a-e572a50910b3 service nova] Acquiring lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.925951] env[62204]: DEBUG nova.compute.manager [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 991.933711] env[62204]: DEBUG nova.network.neutron [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 991.948697] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 991.949014] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 991.949204] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.949426] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 991.949607] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.949765] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 991.952033] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 991.952278] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 991.952554] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 
tempest-ServersTestJSON-2134578796-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 991.952796] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 991.953063] env[62204]: DEBUG nova.virt.hardware [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 991.958855] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d1db6b-f374-4b74-89b9-2fd95297c570 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.965632] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c1ebf5a9-5994-43ab-99ed-fa06b83df35c tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 30.548s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.972338] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f34006-fd7c-4756-8284-5c46b4806af9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.164984] env[62204]: DEBUG nova.network.neutron [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating instance_info_cache with network_info: [{"id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "address": "fa:16:3e:52:85:36", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c7be21-51", "ovs_interfaceid": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.309269] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 
tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.309771] env[62204]: DEBUG nova.compute.manager [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 992.319352] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.316s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.320027] env[62204]: DEBUG nova.objects.instance [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lazy-loading 'resources' on Instance uuid aa336eda-d55a-4560-81bf-e4fcc6f4b485 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 992.326470] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "2178b629-4be6-473b-9a75-19efa234d442-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.326708] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.326898] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.615800] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.616125] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997" acquired 
by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.616347] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.616533] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.616733] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.622065] env[62204]: INFO nova.compute.manager [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Terminating instance [ 992.624225] env[62204]: DEBUG nova.compute.manager [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 992.624457] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 992.625681] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eadcada-b026-44c1-af21-57a0e0ca9fb0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.633643] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 992.633857] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75238622-b00f-436f-a307-c222813d594d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.640379] env[62204]: DEBUG oslo_vmware.api [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 992.640379] env[62204]: value = "task-1200257" [ 992.640379] env[62204]: _type = "Task" [ 992.640379] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.650772] env[62204]: DEBUG oslo_vmware.api [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200257, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.668070] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.668455] env[62204]: DEBUG nova.compute.manager [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Instance network_info: |[{"id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "address": "fa:16:3e:52:85:36", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c7be21-51", "ovs_interfaceid": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 992.668819] env[62204]: DEBUG oslo_concurrency.lockutils [req-e9daa6cf-5bbd-4621-a52c-bcc22dcaa24d req-e6e9e97a-3b69-4137-a17a-e572a50910b3 service nova] Acquired lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.669016] env[62204]: DEBUG nova.network.neutron [req-e9daa6cf-5bbd-4621-a52c-bcc22dcaa24d req-e6e9e97a-3b69-4137-a17a-e572a50910b3 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Refreshing network info cache for port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 992.671021] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:85:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59c7be21-51f9-4357-a2e4-24ec0bf0ed20', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.679271] env[62204]: DEBUG oslo.service.loopingcall [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 
tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 992.682712] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.683673] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4804675-066c-4902-8ca9-ee8bc2de8eb5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.705129] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.705129] env[62204]: value = "task-1200258" [ 992.705129] env[62204]: _type = "Task" [ 992.705129] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.715316] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200258, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.787791] env[62204]: DEBUG nova.network.neutron [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Successfully updated port: aa247f33-6bdf-4e2e-b1aa-fbd26891eff4 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 992.821869] env[62204]: DEBUG nova.compute.utils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 992.823810] env[62204]: DEBUG nova.compute.manager [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 992.824024] env[62204]: DEBUG nova.network.neutron [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 992.954719] env[62204]: DEBUG nova.policy [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6054f141cad7421f85bbb5944f408070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6907df6f17b142c0b4881f15f3b88a9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 993.137853] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4c6412-7ab5-4e83-bf9b-e16dafa67f9d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.148134] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b996a165-d212-48f3-a14c-2a40ca5defb9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.155573] env[62204]: DEBUG oslo_vmware.api [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200257, 'name': PowerOffVM_Task, 'duration_secs': 0.256112} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.156232] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 993.156423] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 993.156700] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9ddd291-51ec-487c-95ab-a6e108edf312 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.187314] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cad3da-e53a-4724-9a12-ca9ab9425094 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.195398] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b80372c-6ebf-4e88-a97f-7118ed350768 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.209642] env[62204]: DEBUG nova.compute.provider_tree [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.221086] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200258, 'name': CreateVM_Task, 'duration_secs': 0.332859} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.223037] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 993.223616] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 993.223831] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 993.224023] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleting the datastore file [datastore1] e42444b3-51c9-4d0f-9eee-c6f2e6631997 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 993.224968] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.225372] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.225827] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 993.226041] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-806bb6df-f6c1-45bb-9d06-5dd502b25a94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.228439] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13cd8bd1-6c9e-4839-8b5f-a858dea78250 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.234945] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 993.234945] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52508381-cf3e-bcd8-85ad-44b975fe0db6" [ 993.234945] env[62204]: 
_type = "Task" [ 993.234945] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.236319] env[62204]: DEBUG oslo_vmware.api [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for the task: (returnval){ [ 993.236319] env[62204]: value = "task-1200260" [ 993.236319] env[62204]: _type = "Task" [ 993.236319] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.249872] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52508381-cf3e-bcd8-85ad-44b975fe0db6, 'name': SearchDatastore_Task, 'duration_secs': 0.009473} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.252924] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 993.253194] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.253489] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.253648] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.253830] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.254119] env[62204]: DEBUG oslo_vmware.api [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200260, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.254452] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c631bf38-b104-432b-b6d9-c04c8eca94d1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.264615] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 993.264812] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 993.265553] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-824d068b-ba52-4f7c-b75c-9ffae862d013 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.270696] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 993.270696] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525b1620-2e28-30ae-00ed-3fbe4277ce4e" [ 993.270696] env[62204]: _type = "Task" [ 993.270696] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.278532] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525b1620-2e28-30ae-00ed-3fbe4277ce4e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.294339] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "refresh_cache-ade509d8-5d7c-4926-bb2f-067dce84f76c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.294470] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "refresh_cache-ade509d8-5d7c-4926-bb2f-067dce84f76c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.294571] env[62204]: DEBUG nova.network.neutron [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 993.326923] env[62204]: DEBUG nova.compute.manager [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 993.343673] env[62204]: DEBUG nova.network.neutron [req-e9daa6cf-5bbd-4621-a52c-bcc22dcaa24d req-e6e9e97a-3b69-4137-a17a-e572a50910b3 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updated VIF entry in instance network info cache for port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 993.344078] env[62204]: DEBUG nova.network.neutron [req-e9daa6cf-5bbd-4621-a52c-bcc22dcaa24d req-e6e9e97a-3b69-4137-a17a-e572a50910b3 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating instance_info_cache with network_info: [{"id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "address": "fa:16:3e:52:85:36", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c7be21-51", "ovs_interfaceid": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.374251] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.374454] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.374732] env[62204]: DEBUG nova.network.neutron [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 993.655899] env[62204]: DEBUG nova.network.neutron [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Successfully created port: 2d9bc2f9-f421-48d9-a636-57fac9c47255 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.716280] env[62204]: DEBUG nova.scheduler.client.report [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 993.750349] env[62204]: DEBUG oslo_vmware.api [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Task: {'id': task-1200260, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15466} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.750625] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 993.750836] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 993.751071] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 993.751258] env[62204]: INFO nova.compute.manager [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Took 1.13 seconds to destroy the instance on the hypervisor. [ 993.751502] env[62204]: DEBUG oslo.service.loopingcall [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 993.751697] env[62204]: DEBUG nova.compute.manager [-] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 993.751801] env[62204]: DEBUG nova.network.neutron [-] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 993.780623] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525b1620-2e28-30ae-00ed-3fbe4277ce4e, 'name': SearchDatastore_Task, 'duration_secs': 0.008231} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.781428] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78d4d124-b07b-4348-b8fd-f9133c80ba82 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.786348] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 993.786348] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522daaf3-c7f4-410a-c52e-65b182d986a5" [ 993.786348] env[62204]: _type = "Task" [ 993.786348] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.794302] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522daaf3-c7f4-410a-c52e-65b182d986a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.847903] env[62204]: DEBUG oslo_concurrency.lockutils [req-e9daa6cf-5bbd-4621-a52c-bcc22dcaa24d req-e6e9e97a-3b69-4137-a17a-e572a50910b3 service nova] Releasing lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.097108] env[62204]: DEBUG nova.network.neutron [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 994.182174] env[62204]: DEBUG nova.network.neutron [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance_info_cache with network_info: [{"id": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "address": "fa:16:3e:27:10:6c", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba57ac2e-ab", "ovs_interfaceid": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.225040] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.903s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.225367] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.016s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.226185] env[62204]: DEBUG nova.objects.instance [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'resources' on Instance uuid 9cf3ca28-443f-4e06-9f04-103b5b6cddd4 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.238127] env[62204]: DEBUG nova.compute.manager [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Received event network-vif-plugged-aa247f33-6bdf-4e2e-b1aa-fbd26891eff4 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.238127] env[62204]: DEBUG oslo_concurrency.lockutils [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] Acquiring lock "ade509d8-5d7c-4926-bb2f-067dce84f76c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 994.238127] env[62204]: DEBUG oslo_concurrency.lockutils [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] Lock "ade509d8-5d7c-4926-bb2f-067dce84f76c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.238127] env[62204]: DEBUG oslo_concurrency.lockutils [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] Lock "ade509d8-5d7c-4926-bb2f-067dce84f76c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.238127] env[62204]: DEBUG nova.compute.manager [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] No waiting events found dispatching network-vif-plugged-aa247f33-6bdf-4e2e-b1aa-fbd26891eff4 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 994.238127] env[62204]: WARNING nova.compute.manager [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Received unexpected event network-vif-plugged-aa247f33-6bdf-4e2e-b1aa-fbd26891eff4 for instance with vm_state building and task_state spawning. [ 994.239600] env[62204]: DEBUG nova.compute.manager [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Received event network-changed-aa247f33-6bdf-4e2e-b1aa-fbd26891eff4 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.239600] env[62204]: DEBUG nova.compute.manager [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Refreshing instance network info cache due to event network-changed-aa247f33-6bdf-4e2e-b1aa-fbd26891eff4. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 994.239600] env[62204]: DEBUG oslo_concurrency.lockutils [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] Acquiring lock "refresh_cache-ade509d8-5d7c-4926-bb2f-067dce84f76c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 994.258346] env[62204]: DEBUG nova.compute.manager [req-c477cb76-4714-4554-aa25-4281b7b2e128 req-ca18a3e0-50ee-40c8-af0f-b6bdb1a2e125 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Received event network-vif-deleted-6823acf4-9576-4220-9d4c-f7c640e04f3e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 994.258346] env[62204]: INFO nova.compute.manager [req-c477cb76-4714-4554-aa25-4281b7b2e128 req-ca18a3e0-50ee-40c8-af0f-b6bdb1a2e125 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Neutron deleted interface 6823acf4-9576-4220-9d4c-f7c640e04f3e; detaching it from the instance and deleting it from the info cache [ 994.258346] env[62204]: DEBUG nova.network.neutron [req-c477cb76-4714-4554-aa25-4281b7b2e128 req-ca18a3e0-50ee-40c8-af0f-b6bdb1a2e125 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.260152] env[62204]: INFO nova.scheduler.client.report [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Deleted allocations for instance aa336eda-d55a-4560-81bf-e4fcc6f4b485 [ 994.305128] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522daaf3-c7f4-410a-c52e-65b182d986a5, 'name': SearchDatastore_Task, 'duration_secs': 0.012342} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.305516] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.305862] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe/3258243e-a9df-4b3e-a6bd-17e3b2168efe.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 994.306199] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbb1f9d3-51ba-46bf-906e-60cad564401f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.313101] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 994.313101] env[62204]: value = "task-1200262" [ 994.313101] env[62204]: _type = "Task" [ 994.313101] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.321850] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.335972] env[62204]: DEBUG nova.compute.manager [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 994.362413] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 994.362819] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 994.363064] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.363341] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 994.363589] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.363864] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 994.364110] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 994.364338] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 994.364564] env[62204]: DEBUG nova.virt.hardware [None 
req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 994.364793] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 994.365064] env[62204]: DEBUG nova.virt.hardware [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 994.365955] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7dc5c1-7d9d-4e1b-b445-0d108f5b5e83 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.375823] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4312c87-1295-43af-8a71-08f3fcdeee6e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.475628] env[62204]: DEBUG nova.network.neutron [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Updating instance_info_cache with network_info: [{"id": "aa247f33-6bdf-4e2e-b1aa-fbd26891eff4", "address": "fa:16:3e:94:19:43", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa247f33-6b", "ovs_interfaceid": "aa247f33-6bdf-4e2e-b1aa-fbd26891eff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.615922] env[62204]: DEBUG nova.network.neutron [-] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.684590] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock 
"refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.763198] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ebbfbea0-7c2c-40b7-8db5-8c63aab90920 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.767600] env[62204]: DEBUG oslo_concurrency.lockutils [None req-1d6cb687-af30-4c8c-a279-cd21d9f184a9 tempest-InstanceActionsTestJSON-387814559 tempest-InstanceActionsTestJSON-387814559-project-member] Lock "aa336eda-d55a-4560-81bf-e4fcc6f4b485" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.160s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.773807] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472d1b31-dd8e-474f-b354-19467702b457 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.808910] env[62204]: DEBUG nova.compute.manager [req-c477cb76-4714-4554-aa25-4281b7b2e128 req-ca18a3e0-50ee-40c8-af0f-b6bdb1a2e125 service nova] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Detach interface failed, port_id=6823acf4-9576-4220-9d4c-f7c640e04f3e, reason: Instance e42444b3-51c9-4d0f-9eee-c6f2e6631997 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 994.822978] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200262, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.978924] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "refresh_cache-ade509d8-5d7c-4926-bb2f-067dce84f76c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.979307] env[62204]: DEBUG nova.compute.manager [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Instance network_info: |[{"id": "aa247f33-6bdf-4e2e-b1aa-fbd26891eff4", "address": "fa:16:3e:94:19:43", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa247f33-6b", "ovs_interfaceid": "aa247f33-6bdf-4e2e-b1aa-fbd26891eff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 994.980650] env[62204]: DEBUG oslo_concurrency.lockutils [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] Acquired lock "refresh_cache-ade509d8-5d7c-4926-bb2f-067dce84f76c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.980650] env[62204]: DEBUG nova.network.neutron [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Refreshing network info cache for port aa247f33-6bdf-4e2e-b1aa-fbd26891eff4 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 994.981283] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:19:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa247f33-6bdf-4e2e-b1aa-fbd26891eff4', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.988931] env[62204]: DEBUG oslo.service.loopingcall [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 994.989238] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.990448] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79fd18b9-8ea1-43c5-9b69-309725a3be6a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.005437] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e24680-3339-4df4-8cab-c73e1ee1357a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.017341] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.017341] env[62204]: value = "task-1200263" [ 995.017341] env[62204]: _type = "Task" [ 995.017341] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.018439] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6574a13a-0812-4892-8066-e14d20ab8093 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.062588] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f5ab18-bd2a-46ad-a673-68596de5ee1e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.065332] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200263, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.066142] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667f28e2-0b7f-44b9-9921-3dc023d56f36 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.075736] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e7433b81-e674-481b-8168-b40482095413 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Suspending the VM {{(pid=62204) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 995.078633] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-2eddf714-e43c-462f-8bbb-6fd12f3d3d99 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.082157] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c550a1be-2be1-4b0a-ada0-50dc8ac917c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.100524] env[62204]: DEBUG nova.compute.provider_tree [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.105939] env[62204]: DEBUG oslo_vmware.api [None req-e7433b81-e674-481b-8168-b40482095413 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 995.105939] env[62204]: value = "task-1200264" [ 995.105939] env[62204]: _type = "Task" [ 995.105939] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.118406] env[62204]: INFO nova.compute.manager [-] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Took 1.37 seconds to deallocate network for instance. [ 995.118772] env[62204]: DEBUG oslo_vmware.api [None req-e7433b81-e674-481b-8168-b40482095413 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200264, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.216857] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2349a7ef-fdd2-4a69-a568-7fc6d1b6dcd0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.240537] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7848159e-aedc-4c5b-974b-2e539c20579d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.251046] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance '2178b629-4be6-473b-9a75-19efa234d442' progress to 83 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 995.325104] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200262, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.979058} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.325104] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe/3258243e-a9df-4b3e-a6bd-17e3b2168efe.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.325104] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.325489] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-831791eb-0f42-4c8d-a0af-831f9d594123 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.332554] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 995.332554] env[62204]: value = "task-1200265" [ 995.332554] env[62204]: _type = "Task" [ 995.332554] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.343147] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200265, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.536031] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200263, 'name': CreateVM_Task, 'duration_secs': 0.390569} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.536579] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.537374] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.537543] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.537940] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 995.538242] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b7b021d-9be8-4d08-b9bd-b0d975de23f2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.542955] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 995.542955] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f5a0c2-4db3-bd3b-b6f4-36c13d854251" [ 995.542955] env[62204]: _type = "Task" [ 995.542955] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.550954] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f5a0c2-4db3-bd3b-b6f4-36c13d854251, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.605962] env[62204]: DEBUG nova.scheduler.client.report [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 995.618314] env[62204]: DEBUG oslo_vmware.api [None req-e7433b81-e674-481b-8168-b40482095413 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200264, 'name': SuspendVM_Task} progress is 45%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.625100] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 995.701357] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Volume attach. 
Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 995.701680] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260125', 'volume_id': 'e7cba0e4-1e22-4040-be6e-c078b7175758', 'name': 'volume-e7cba0e4-1e22-4040-be6e-c078b7175758', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21056adb-d81e-45bd-b354-1bcb488d2ed9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e7cba0e4-1e22-4040-be6e-c078b7175758', 'serial': 'e7cba0e4-1e22-4040-be6e-c078b7175758'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 995.702486] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c290a4b8-dac6-4489-92fc-8bcb8aa34645 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.723019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a3e443-c1f8-406e-8cf2-7d33e13ec646 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.747599] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] volume-e7cba0e4-1e22-4040-be6e-c078b7175758/volume-e7cba0e4-1e22-4040-be6e-c078b7175758.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.747927] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97f22fe3-57e5-404a-bccb-849892e035d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.766302] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 995.766600] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52d835f0-ceae-4fd5-a0de-b121557ecf02 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.774099] env[62204]: DEBUG oslo_vmware.api [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 995.774099] env[62204]: value = "task-1200267" [ 995.774099] env[62204]: _type = "Task" [ 995.774099] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.775408] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 995.775408] env[62204]: value = "task-1200266" [ 995.775408] env[62204]: _type = "Task" [ 995.775408] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.787436] env[62204]: DEBUG oslo_vmware.api [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200267, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.790550] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200266, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.846391] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073928} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.846391] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.847345] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efb42c5-f1c5-42ab-b26a-4156cdd073fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.874834] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe/3258243e-a9df-4b3e-a6bd-17e3b2168efe.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.875221] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de655548-fc13-497a-bdf9-ca0b9cf765a9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.901192] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 995.901192] env[62204]: value = "task-1200268" [ 995.901192] env[62204]: _type = "Task" [ 995.901192] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.911359] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200268, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.055367] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f5a0c2-4db3-bd3b-b6f4-36c13d854251, 'name': SearchDatastore_Task, 'duration_secs': 0.013974} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.058794] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.059271] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.059637] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.060124] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.060432] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.061608] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e588ca3-48c7-4ecd-af80-d3697ea45817 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.078066] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 996.078066] env[62204]: DEBUG nova.virt.vmwareapi.vmops 
[None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 996.078066] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d27a2b9a-c242-4ae6-9ece-b3dcc3c46bbe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.082923] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 996.082923] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52441a13-cfc9-f397-7daf-dba1148b5781" [ 996.082923] env[62204]: _type = "Task" [ 996.082923] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.092206] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52441a13-cfc9-f397-7daf-dba1148b5781, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.114853] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.890s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.117371] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.598s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.124726] env[62204]: DEBUG oslo_vmware.api [None req-e7433b81-e674-481b-8168-b40482095413 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200264, 'name': SuspendVM_Task} progress is 45%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.156790] env[62204]: INFO nova.scheduler.client.report [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleted allocations for instance 9cf3ca28-443f-4e06-9f04-103b5b6cddd4 [ 996.176780] env[62204]: DEBUG nova.network.neutron [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Updated VIF entry in instance network info cache for port aa247f33-6bdf-4e2e-b1aa-fbd26891eff4. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 996.176780] env[62204]: DEBUG nova.network.neutron [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Updating instance_info_cache with network_info: [{"id": "aa247f33-6bdf-4e2e-b1aa-fbd26891eff4", "address": "fa:16:3e:94:19:43", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa247f33-6b", "ovs_interfaceid": "aa247f33-6bdf-4e2e-b1aa-fbd26891eff4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.271428] env[62204]: DEBUG nova.compute.manager [req-59b846d2-f723-4e0b-be56-90df0c8205ac req-d532e137-d207-4a08-924e-504bcc008f69 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Received event network-vif-plugged-2d9bc2f9-f421-48d9-a636-57fac9c47255 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 996.271428] env[62204]: DEBUG oslo_concurrency.lockutils [req-59b846d2-f723-4e0b-be56-90df0c8205ac req-d532e137-d207-4a08-924e-504bcc008f69 service nova] Acquiring lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.271428] env[62204]: DEBUG oslo_concurrency.lockutils [req-59b846d2-f723-4e0b-be56-90df0c8205ac req-d532e137-d207-4a08-924e-504bcc008f69 service nova] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.271428] env[62204]: DEBUG oslo_concurrency.lockutils [req-59b846d2-f723-4e0b-be56-90df0c8205ac req-d532e137-d207-4a08-924e-504bcc008f69 service nova] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.271428] env[62204]: DEBUG nova.compute.manager [req-59b846d2-f723-4e0b-be56-90df0c8205ac req-d532e137-d207-4a08-924e-504bcc008f69 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] No waiting events found dispatching network-vif-plugged-2d9bc2f9-f421-48d9-a636-57fac9c47255 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
996.271833] env[62204]: WARNING nova.compute.manager [req-59b846d2-f723-4e0b-be56-90df0c8205ac req-d532e137-d207-4a08-924e-504bcc008f69 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Received unexpected event network-vif-plugged-2d9bc2f9-f421-48d9-a636-57fac9c47255 for instance with vm_state building and task_state spawning. [ 996.292047] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200266, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.293589] env[62204]: DEBUG oslo_vmware.api [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200267, 'name': ReconfigVM_Task, 'duration_secs': 0.470039} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.294082] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfigured VM instance instance-0000005d to attach disk [datastore2] volume-e7cba0e4-1e22-4040-be6e-c078b7175758/volume-e7cba0e4-1e22-4040-be6e-c078b7175758.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.299134] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c725b19-6dde-4bfc-be86-30081190d552 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.316480] env[62204]: DEBUG oslo_vmware.api [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 996.316480] env[62204]: value = "task-1200269" [ 996.316480] env[62204]: _type = "Task" [ 996.316480] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.325400] env[62204]: DEBUG oslo_vmware.api [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200269, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.338241] env[62204]: DEBUG nova.network.neutron [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Successfully updated port: 2d9bc2f9-f421-48d9-a636-57fac9c47255 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 996.411859] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200268, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.593911] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52441a13-cfc9-f397-7daf-dba1148b5781, 'name': SearchDatastore_Task, 'duration_secs': 0.02602} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.594787] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc6df979-31df-4ed3-95ff-2522ef6020a0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.600330] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 996.600330] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c09cbc-cca1-a698-bc79-a195636d3f8b" [ 996.600330] env[62204]: _type = "Task" [ 996.600330] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.608967] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c09cbc-cca1-a698-bc79-a195636d3f8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.618227] env[62204]: DEBUG oslo_vmware.api [None req-e7433b81-e674-481b-8168-b40482095413 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200264, 'name': SuspendVM_Task, 'duration_secs': 1.208184} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.618498] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e7433b81-e674-481b-8168-b40482095413 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Suspended the VM {{(pid=62204) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 996.618687] env[62204]: DEBUG nova.compute.manager [None req-e7433b81-e674-481b-8168-b40482095413 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 996.619455] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fc1b2e-5551-4244-9dfd-cd91bb710ad8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.624275] env[62204]: INFO nova.compute.claims [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.668071] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fdb9b9dd-68fd-4ea8-a53d-39ad00735038 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "9cf3ca28-443f-4e06-9f04-103b5b6cddd4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.405s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.678011] env[62204]: DEBUG oslo_concurrency.lockutils [req-2a49e0be-e503-4f47-b245-565b71933b94 req-5daeae56-e20c-4d0e-87c6-430fbfe8ac2e service nova] Releasing lock "refresh_cache-ade509d8-5d7c-4926-bb2f-067dce84f76c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.790120] env[62204]: DEBUG oslo_vmware.api [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200266, 'name': PowerOnVM_Task, 'duration_secs': 0.896241} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.790503] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 996.790786] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ce3db0-e66d-49db-97a2-0f0404ad6265 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance '2178b629-4be6-473b-9a75-19efa234d442' progress to 100 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 996.827768] env[62204]: DEBUG oslo_vmware.api [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200269, 'name': ReconfigVM_Task, 'duration_secs': 0.343169} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.828151] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260125', 'volume_id': 'e7cba0e4-1e22-4040-be6e-c078b7175758', 'name': 'volume-e7cba0e4-1e22-4040-be6e-c078b7175758', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21056adb-d81e-45bd-b354-1bcb488d2ed9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e7cba0e4-1e22-4040-be6e-c078b7175758', 'serial': 'e7cba0e4-1e22-4040-be6e-c078b7175758'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 996.845105] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-1c52b662-e436-4e0c-a77b-0f2fc1041a7d" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.845105] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-1c52b662-e436-4e0c-a77b-0f2fc1041a7d" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.845105] env[62204]: DEBUG nova.network.neutron [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 996.912253] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200268, 'name': ReconfigVM_Task, 'duration_secs': 0.838527} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.912571] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe/3258243e-a9df-4b3e-a6bd-17e3b2168efe.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.913276] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05a81385-7cf8-46b6-b459-13dfa0d06018 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.920857] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 996.920857] env[62204]: value = "task-1200270" [ 996.920857] env[62204]: _type = "Task" [ 996.920857] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.931531] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200270, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.112954] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c09cbc-cca1-a698-bc79-a195636d3f8b, 'name': SearchDatastore_Task, 'duration_secs': 0.010841} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.113320] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.113756] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] ade509d8-5d7c-4926-bb2f-067dce84f76c/ade509d8-5d7c-4926-bb2f-067dce84f76c.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 997.114155] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a97689b-d2dd-467d-9c3c-a4cdde48e7db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.122738] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 997.122738] env[62204]: value = "task-1200271" [ 997.122738] env[62204]: _type = "Task" [ 997.122738] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.133948] env[62204]: INFO nova.compute.resource_tracker [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating resource usage from migration d79ae017-636d-4eab-a832-79fdb917977d [ 997.136569] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200271, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.382237] env[62204]: DEBUG nova.network.neutron [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 997.411008] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b848815f-3574-454a-98d4-ae69b44c8221 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.420020] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8250dcd7-37fd-445c-8509-fac4e6b41762 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.431395] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200270, 'name': Rename_Task, 'duration_secs': 0.1476} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.458027] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.461368] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6ab7384-8c03-4216-8670-fc514b79fc38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.463487] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6459d19-54ea-45ed-93bb-312fc92a0c8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.472019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d54848-8016-452e-a108-71ec5107ec04 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.475764] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 997.475764] env[62204]: value = "task-1200272" [ 997.475764] env[62204]: _type = "Task" [ 997.475764] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.489018] env[62204]: DEBUG nova.compute.provider_tree [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.495732] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200272, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.603732] env[62204]: DEBUG nova.network.neutron [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Updating instance_info_cache with network_info: [{"id": "2d9bc2f9-f421-48d9-a636-57fac9c47255", "address": "fa:16:3e:2f:31:39", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d9bc2f9-f4", "ovs_interfaceid": "2d9bc2f9-f421-48d9-a636-57fac9c47255", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.635539] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200271, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.898934] env[62204]: DEBUG nova.objects.instance [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 21056adb-d81e-45bd-b354-1bcb488d2ed9 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.989126] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200272, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.992200] env[62204]: DEBUG nova.scheduler.client.report [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 998.086285] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "4dc4546f-85e6-4259-9ccd-a7396669eace" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 998.086590] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.107395] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-1c52b662-e436-4e0c-a77b-0f2fc1041a7d" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.107807] env[62204]: DEBUG nova.compute.manager [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Instance network_info: |[{"id": "2d9bc2f9-f421-48d9-a636-57fac9c47255", "address": "fa:16:3e:2f:31:39", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d9bc2f9-f4", "ovs_interfaceid": "2d9bc2f9-f421-48d9-a636-57fac9c47255", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 998.108616] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:31:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d9bc2f9-f421-48d9-a636-57fac9c47255', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 998.117251] env[62204]: DEBUG oslo.service.loopingcall [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.117928] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 998.118276] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c03af42c-8354-4120-9a32-924c975e09ab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.144711] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200271, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.712191} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.146181] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] ade509d8-5d7c-4926-bb2f-067dce84f76c/ade509d8-5d7c-4926-bb2f-067dce84f76c.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 998.146469] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 998.146883] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 998.146883] env[62204]: value = "task-1200273" [ 998.146883] env[62204]: _type = "Task" [ 998.146883] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.147357] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1fb6151-a015-4804-a5c1-cc6e0e6b8f71 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.158555] env[62204]: INFO nova.compute.manager [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Resuming [ 998.159432] env[62204]: DEBUG nova.objects.instance [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lazy-loading 'flavor' on Instance uuid c0990e53-70c9-4536-b26a-bc00bd457c56 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.160919] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200273, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.162995] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 998.162995] env[62204]: value = "task-1200274" [ 998.162995] env[62204]: _type = "Task" [ 998.162995] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.172714] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200274, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.301383] env[62204]: DEBUG nova.compute.manager [req-7ef68d91-960e-49b4-8cdf-89ea086f0ed7 req-ed212014-b8ec-403e-997a-f33bbfed65d2 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Received event network-changed-2d9bc2f9-f421-48d9-a636-57fac9c47255 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 998.301702] env[62204]: DEBUG nova.compute.manager [req-7ef68d91-960e-49b4-8cdf-89ea086f0ed7 req-ed212014-b8ec-403e-997a-f33bbfed65d2 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Refreshing instance network info cache due to event network-changed-2d9bc2f9-f421-48d9-a636-57fac9c47255. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 998.302030] env[62204]: DEBUG oslo_concurrency.lockutils [req-7ef68d91-960e-49b4-8cdf-89ea086f0ed7 req-ed212014-b8ec-403e-997a-f33bbfed65d2 service nova] Acquiring lock "refresh_cache-1c52b662-e436-4e0c-a77b-0f2fc1041a7d" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 998.303504] env[62204]: DEBUG oslo_concurrency.lockutils [req-7ef68d91-960e-49b4-8cdf-89ea086f0ed7 req-ed212014-b8ec-403e-997a-f33bbfed65d2 service nova] Acquired lock "refresh_cache-1c52b662-e436-4e0c-a77b-0f2fc1041a7d" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.303504] env[62204]: DEBUG nova.network.neutron [req-7ef68d91-960e-49b4-8cdf-89ea086f0ed7 req-ed212014-b8ec-403e-997a-f33bbfed65d2 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Refreshing network info cache for port 2d9bc2f9-f421-48d9-a636-57fac9c47255 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 998.407779] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d871e27a-ab31-4b03-a9b4-7b5c5e27f9ee tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.334s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.489807] env[62204]: DEBUG oslo_vmware.api [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200272, 'name': PowerOnVM_Task, 'duration_secs': 0.743703} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.490750] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.491053] env[62204]: INFO nova.compute.manager [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Took 9.05 seconds to spawn the instance on the hypervisor. 
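Annotation: the "Updating instance_info_cache with network_info" entries above dump each cached Neutron VIF as a nested dict (port id, MAC, bridge, subnets with fixed IPs, NSX binding details). Below is a minimal, illustrative sketch of pulling the readable fields out of one such blob; the `vif` literal mirrors (and abbreviates) the structure logged above, and `summarize_vif` is a hypothetical helper, not part of Nova.

```python
# Illustrative only: a stripped-down VIF dict shaped like the
# "Updating instance_info_cache with network_info" entries in the log.
def summarize_vif(vif: dict) -> str:
    """Return 'port-id mac fixed-ip/prefix via bridge (type)' for one cached VIF."""
    net = vif["network"]
    subnet = net["subnets"][0]                 # the logged VIFs carry a single IPv4 subnet
    fixed_ip = subnet["ips"][0]["address"]
    prefix = subnet["cidr"].split("/")[1]
    return (f"{vif['id']} {vif['address']} "
            f"{fixed_ip}/{prefix} via {net['bridge']} ({vif['type']})")

vif = {
    "id": "2d9bc2f9-f421-48d9-a636-57fac9c47255",
    "address": "fa:16:3e:2f:31:39",
    "type": "ovs",
    "network": {
        "bridge": "br-int",
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.7"}]}],
    },
}

print(summarize_vif(vif))
# 2d9bc2f9-f421-48d9-a636-57fac9c47255 fa:16:3e:2f:31:39 192.168.128.7/28 via br-int (ovs)
```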
[ 998.491297] env[62204]: DEBUG nova.compute.manager [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 998.492359] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e58be79-fa1e-4a0e-8240-714308eb2442 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.497896] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.380s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.498140] env[62204]: INFO nova.compute.manager [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Migrating [ 998.506830] env[62204]: DEBUG oslo_concurrency.lockutils [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.707s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 998.507100] env[62204]: DEBUG nova.objects.instance [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'resources' on Instance uuid 7671c77f-3da8-4a41-a472-138c7bd23a92 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.590339] env[62204]: INFO nova.compute.manager [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Detaching volume c03fb060-d8ba-44fe-b529-4e52b7dc7047 [ 998.629560] env[62204]: INFO nova.virt.block_device [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Attempting to driver detach volume c03fb060-d8ba-44fe-b529-4e52b7dc7047 from mountpoint /dev/sdb [ 998.629732] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Volume detach. 
Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 998.629942] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260098', 'volume_id': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'name': 'volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4dc4546f-85e6-4259-9ccd-a7396669eace', 'attached_at': '', 'detached_at': '', 'volume_id': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'serial': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 998.630940] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7d2060-d093-4612-aa3e-1aebf9fbd264 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.657365] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6454b5a9-5ad4-4542-b687-fae029f794ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.672147] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200273, 'name': CreateVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.678131] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3844f9b0-a900-48d4-887c-a086b6863b57 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.685473] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200274, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076289} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.702744] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 998.703724] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b510af5d-8398-4712-ad4b-00c351dadee9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.707098] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8ca5f9-42f9-488b-a923-c9a0266a2bdd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.731721] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] ade509d8-5d7c-4926-bb2f-067dce84f76c/ade509d8-5d7c-4926-bb2f-067dce84f76c.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.743525] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ac3a13e-0f1f-4f89-8c3a-03f0105180e9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.758329] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] The volume has not been displaced from its original location: [datastore2] volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047/volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047.vmdk. No consolidation needed. {{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 998.763830] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Reconfiguring VM instance instance-0000004d to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 998.764512] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb7ff9db-f67e-43b1-b1de-bafe902ba02e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.783192] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 998.783192] env[62204]: value = "task-1200275" [ 998.783192] env[62204]: _type = "Task" [ 998.783192] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.784687] env[62204]: DEBUG oslo_vmware.api [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 998.784687] env[62204]: value = "task-1200276" [ 998.784687] env[62204]: _type = "Task" [ 998.784687] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.796090] env[62204]: DEBUG oslo_vmware.api [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200276, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.799367] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200275, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.026606] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.027040] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.027153] env[62204]: DEBUG nova.network.neutron [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 999.040220] env[62204]: INFO nova.compute.manager [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Took 19.54 seconds to build instance. 
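Annotation: the repeated "Acquiring lock … / Lock … acquired … waited Ns / Lock … released … held Ns" entries come from oslo.concurrency's debug instrumentation around named locks. A minimal sketch of that pattern, assuming only the public `lockutils.synchronized` decorator and `lockutils.lock()` context manager and that oslo.concurrency is installed; the lock names and functions here are made-up examples, not Nova code paths.

```python
# Sketch of the locking pattern behind the "Acquiring lock ... acquired ...
# released" lines above. Lock names and functions are hypothetical.
from oslo_concurrency import lockutils

@lockutils.synchronized("demo-instance-events")
def pop_instance_event(instance_uuid, event_name):
    # Only one thread holding "demo-instance-events" runs this at a time;
    # lockutils logs the acquire/wait/held timings around this section.
    print(f"handled {event_name} for {instance_uuid}")

def refresh_cache(instance_uuid):
    # Equivalent explicit form using the context manager.
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        print(f"refreshing network info cache for {instance_uuid}")

if __name__ == "__main__":
    pop_instance_event("1c52b662-e436-4e0c-a77b-0f2fc1041a7d",
                       "network-vif-plugged")
    refresh_cache("1c52b662-e436-4e0c-a77b-0f2fc1041a7d")
```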
[ 999.073730] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "2178b629-4be6-473b-9a75-19efa234d442" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.073730] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.073930] env[62204]: DEBUG nova.compute.manager [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Going to confirm migration 3 {{(pid=62204) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 999.083191] env[62204]: DEBUG nova.network.neutron [req-7ef68d91-960e-49b4-8cdf-89ea086f0ed7 req-ed212014-b8ec-403e-997a-f33bbfed65d2 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Updated VIF entry in instance network info cache for port 2d9bc2f9-f421-48d9-a636-57fac9c47255. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 999.083623] env[62204]: DEBUG nova.network.neutron [req-7ef68d91-960e-49b4-8cdf-89ea086f0ed7 req-ed212014-b8ec-403e-997a-f33bbfed65d2 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Updating instance_info_cache with network_info: [{"id": "2d9bc2f9-f421-48d9-a636-57fac9c47255", "address": "fa:16:3e:2f:31:39", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d9bc2f9-f4", "ovs_interfaceid": "2d9bc2f9-f421-48d9-a636-57fac9c47255", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.167203] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200273, 'name': CreateVM_Task, 'duration_secs': 0.548816} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.167664] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.168215] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.168291] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.168598] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 999.168856] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92c09e86-d3dc-43dd-8117-b76afb9617df {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.175503] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 999.175503] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5203a8f8-0f9b-3ae6-a0a5-9128f2e467b5" [ 999.175503] env[62204]: _type = "Task" [ 999.175503] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.184168] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.184371] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquired lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.184557] env[62204]: DEBUG nova.network.neutron [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 999.185803] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5203a8f8-0f9b-3ae6-a0a5-9128f2e467b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.278761] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a0c315-99dc-4717-8c06-1d12f3ad50fe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.289202] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67ca534-56e8-46d2-8c06-480c5aacd18c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.300719] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.300961] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.302475] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200275, 'name': ReconfigVM_Task, 'duration_secs': 0.318023} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.329081] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Reconfigured VM instance instance-00000065 to attach disk [datastore2] ade509d8-5d7c-4926-bb2f-067dce84f76c/ade509d8-5d7c-4926-bb2f-067dce84f76c.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.331043] env[62204]: DEBUG oslo_vmware.api [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200276, 'name': ReconfigVM_Task, 'duration_secs': 0.310167} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.331300] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-410c937e-b52f-4e62-a076-6ce1f5aa5208 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.333423] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48fe8e53-1b9b-4620-b703-d4825d95743e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.335807] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Reconfigured VM instance instance-0000004d to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 999.340785] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae3016c2-6fd9-48c2-820a-3d3d8967937b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.357085] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffa35dc-68e2-44cb-9ed3-74782f231c68 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.361215] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 999.361215] env[62204]: value = "task-1200277" [ 999.361215] env[62204]: _type = "Task" [ 999.361215] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.362449] env[62204]: DEBUG oslo_vmware.api [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 999.362449] env[62204]: value = "task-1200278" [ 999.362449] env[62204]: _type = "Task" [ 999.362449] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.378517] env[62204]: DEBUG nova.compute.provider_tree [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.383224] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200277, 'name': Rename_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.387589] env[62204]: DEBUG oslo_vmware.api [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200278, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.542885] env[62204]: DEBUG oslo_concurrency.lockutils [None req-fd8f40f0-aa53-4e3f-8aa5-77d256b7aeba tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.054s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.587308] env[62204]: DEBUG oslo_concurrency.lockutils [req-7ef68d91-960e-49b4-8cdf-89ea086f0ed7 req-ed212014-b8ec-403e-997a-f33bbfed65d2 service nova] Releasing lock "refresh_cache-1c52b662-e436-4e0c-a77b-0f2fc1041a7d" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.686102] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5203a8f8-0f9b-3ae6-a0a5-9128f2e467b5, 'name': SearchDatastore_Task, 'duration_secs': 0.041681} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.686394] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 999.686634] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 999.687115] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.687287] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.687477] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 999.687765] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0a0d300-312b-45ef-8ce4-6793cd55d688 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.700114] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 999.702019] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 999.702019] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5edec5b9-56c6-4453-912c-6b927a3e7f56 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.706443] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 999.706443] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52638250-e291-b459-80bb-e9b2eaaf65ea" [ 999.706443] env[62204]: _type = "Task" [ 999.706443] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.713976] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52638250-e291-b459-80bb-e9b2eaaf65ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.830517] env[62204]: DEBUG nova.compute.utils [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 999.873948] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200277, 'name': Rename_Task, 'duration_secs': 0.157303} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.876811] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.877365] env[62204]: DEBUG oslo_vmware.api [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200278, 'name': ReconfigVM_Task, 'duration_secs': 0.178657} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.877548] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f796863-7712-4a7a-83eb-15b754aba4e4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.878990] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260098', 'volume_id': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'name': 'volume-c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '4dc4546f-85e6-4259-9ccd-a7396669eace', 'attached_at': '', 'detached_at': '', 'volume_id': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047', 'serial': 'c03fb060-d8ba-44fe-b529-4e52b7dc7047'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 999.881615] env[62204]: DEBUG nova.network.neutron [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating instance_info_cache with network_info: [{"id": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "address": "fa:16:3e:38:96:d6", "network": {"id": "248361b7-ee86-4f25-85b7-c17d36fec463", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1521802591-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7f1dbef99d9946d58fbe59f2850f6c63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb48dbbb-64", "ovs_interfaceid": "cb48dbbb-646f-445c-89d1-8c4a9e36de59", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.884961] env[62204]: DEBUG nova.scheduler.client.report [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 
999.889165] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 999.889165] env[62204]: value = "task-1200279" [ 999.889165] env[62204]: _type = "Task" [ 999.889165] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.897997] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200279, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.958451] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.958672] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.958862] env[62204]: DEBUG nova.network.neutron [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 999.959064] env[62204]: DEBUG nova.objects.instance [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lazy-loading 'info_cache' on Instance uuid 2178b629-4be6-473b-9a75-19efa234d442 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.216857] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52638250-e291-b459-80bb-e9b2eaaf65ea, 'name': SearchDatastore_Task, 'duration_secs': 0.033977} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.217770] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77865a5e-f425-4131-9440-8eb175f9e7f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.223014] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1000.223014] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202ea5b-9395-8cd7-c5ef-9c8a3304e4a0" [ 1000.223014] env[62204]: _type = "Task" [ 1000.223014] env[62204]: } to complete. 
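The inventory data logged just above for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 (VCPU allocation_ratio 4.0, MEMORY_MB reserved 512, and so on) determines how much capacity Placement can hand out per resource class under the usual rule usable = (total - reserved) * allocation_ratio. A minimal sketch of that arithmetic, using only the figures from the log; the helper name is illustrative, not Nova or Placement code:

# Sketch only: applies usable = (total - reserved) * allocation_ratio to the
# inventory data reported in the log entries above. Numbers copied from the log.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def usable(inv):
    # Effective capacity per resource class.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(usable(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}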
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.231085] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202ea5b-9395-8cd7-c5ef-9c8a3304e4a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.236098] env[62204]: DEBUG nova.network.neutron [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [{"id": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "address": "fa:16:3e:d5:25:7f", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e81e820-35", "ovs_interfaceid": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.333789] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.033s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.384809] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Releasing lock "refresh_cache-c0990e53-70c9-4536-b26a-bc00bd457c56" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.385841] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aea8dea-1081-419e-9f34-8a2d78b09bc3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.390349] env[62204]: DEBUG oslo_concurrency.lockutils [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.394209] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 10.414s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1000.398959] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Resuming the VM {{(pid=62204) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1000.399586] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e90400ce-7ce3-4811-900f-64dbd0f122d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.406496] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200279, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.407674] env[62204]: DEBUG oslo_vmware.api [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 1000.407674] env[62204]: value = "task-1200280" [ 1000.407674] env[62204]: _type = "Task" [ 1000.407674] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.415550] env[62204]: DEBUG oslo_vmware.api [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200280, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.422717] env[62204]: INFO nova.scheduler.client.report [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleted allocations for instance 7671c77f-3da8-4a41-a472-138c7bd23a92 [ 1000.445983] env[62204]: DEBUG nova.objects.instance [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'flavor' on Instance uuid 4dc4546f-85e6-4259-9ccd-a7396669eace {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.630557] env[62204]: DEBUG nova.compute.manager [req-4ec83db7-cbab-4940-aed3-459d446b611f req-e0e6e4cc-84f1-422d-890b-80cf074a9ab1 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received event network-changed-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1000.630771] env[62204]: DEBUG nova.compute.manager [req-4ec83db7-cbab-4940-aed3-459d446b611f req-e0e6e4cc-84f1-422d-890b-80cf074a9ab1 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Refreshing instance network info cache due to event network-changed-59c7be21-51f9-4357-a2e4-24ec0bf0ed20. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1000.630993] env[62204]: DEBUG oslo_concurrency.lockutils [req-4ec83db7-cbab-4940-aed3-459d446b611f req-e0e6e4cc-84f1-422d-890b-80cf074a9ab1 service nova] Acquiring lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.631156] env[62204]: DEBUG oslo_concurrency.lockutils [req-4ec83db7-cbab-4940-aed3-459d446b611f req-e0e6e4cc-84f1-422d-890b-80cf074a9ab1 service nova] Acquired lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.631460] env[62204]: DEBUG nova.network.neutron [req-4ec83db7-cbab-4940-aed3-459d446b611f req-e0e6e4cc-84f1-422d-890b-80cf074a9ab1 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Refreshing network info cache for port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1000.733629] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202ea5b-9395-8cd7-c5ef-9c8a3304e4a0, 'name': SearchDatastore_Task, 'duration_secs': 0.017246} completed successfully. 
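The instance_info_cache dumps in these entries are lists of VIF dictionaries (id, MAC address, network, subnets, fixed and floating IPs). For reference, a small self-contained sketch that pulls the addresses out of such a structure; the sample data is trimmed from the cache entry for port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20 shown further below, and the helper is illustrative only, not Nova code:

# Illustrative helper (not part of Nova) for reading the network_info
# structures dumped in the instance_info_cache log entries.
network_info = [{
    "id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20",
    "address": "fa:16:3e:52:85:36",
    "network": {"subnets": [{
        "cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.14", "type": "fixed",
                 "floating_ips": [{"address": "10.180.180.144",
                                   "type": "floating"}]}],
    }]},
}]

def addresses(nw_info):
    # Yield (port_id, fixed_ip, [floating_ips]) for every IP on every VIF.
    for vif in nw_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                yield (vif["id"], ip["address"],
                       [f["address"] for f in ip.get("floating_ips", [])])

print(list(addresses(network_info)))
# [('59c7be21-51f9-4357-a2e4-24ec0bf0ed20', '192.168.128.14', ['10.180.180.144'])]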
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.733952] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.734237] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 1c52b662-e436-4e0c-a77b-0f2fc1041a7d/1c52b662-e436-4e0c-a77b-0f2fc1041a7d.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1000.734513] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e7f6237-240b-4772-970d-dc4fdc15b99d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.738523] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.741366] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1000.741366] env[62204]: value = "task-1200281" [ 1000.741366] env[62204]: _type = "Task" [ 1000.741366] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.750072] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200281, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.939720] env[62204]: DEBUG oslo_concurrency.lockutils [None req-224a7540-164e-4d24-8f4e-bf4e3c2787ff tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "7671c77f-3da8-4a41-a472-138c7bd23a92" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.925s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.943033] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200279, 'name': PowerOnVM_Task} progress is 66%. 
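The CopyVirtualDisk_Task above copies the cached image VMDK into a per-instance folder; the datastore paths follow the pattern "[<datastore>] devstack-image-cache_base/<image-id>/<image-id>.vmdk" for the source and "[<datastore>] <instance-uuid>/<instance-uuid>.vmdk" for the destination. A small sketch that builds those strings from the identifiers seen in the log; both helper names are made up for illustration:

# Sketch: reconstructs the source/destination datastore paths used by the
# CopyVirtualDisk_Task entry above. Helper names are illustrative only.
def cached_image_path(datastore, image_id, cache_dir="devstack-image-cache_base"):
    return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore, instance_uuid):
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

src = cached_image_path("datastore1", "c0e4d3a1-f965-49e2-ab05-fbf425872dcc")
dst = instance_disk_path("datastore1", "1c52b662-e436-4e0c-a77b-0f2fc1041a7d")
print(src)  # [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk
print(dst)  # [datastore1] 1c52b662-e436-4e0c-a77b-0f2fc1041a7d/1c52b662-e436-4e0c-a77b-0f2fc1041a7d.vmdk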
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.959185] env[62204]: DEBUG oslo_vmware.api [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200280, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.196726] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.197087] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.198285] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "274285e5-fc23-48b4-b0d6-5a67bc764d78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.198285] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.198285] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.204835] env[62204]: INFO nova.compute.manager [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Terminating instance [ 1001.206487] env[62204]: DEBUG nova.compute.manager [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Start destroying the instance on the hypervisor. 
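The 'acquired ... :: waited Ns' and '"released" ... :: held Ns' entries above come from the oslo_concurrency lock wrapper, which times how long a caller waited to get a named lock and how long it held it. A minimal, stdlib-only sketch of that accounting pattern; this is a simplified stand-in, not the real oslo_concurrency.lockutils implementation:

# Simplified stand-in (not oslo_concurrency code) for the waited/held lock
# timing seen in the surrounding log entries.
import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name, by):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        held = time.monotonic() - t1
        lock.release()
        print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

# Example usage of the hypothetical helper:
with timed_lock("compute_resources", "ResourceTracker.update_usage"):
    time.sleep(0.01)  # stand-in for the critical section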
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1001.206685] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1001.207620] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb23a48-53f3-4d6a-bf86-4891e34a8b63 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.224458] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1001.227176] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64221e1e-3501-4baa-a26e-43d2b49787c9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.238742] env[62204]: DEBUG oslo_vmware.api [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 1001.238742] env[62204]: value = "task-1200282" [ 1001.238742] env[62204]: _type = "Task" [ 1001.238742] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.258588] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200281, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.263039] env[62204]: DEBUG oslo_vmware.api [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200282, 'name': PowerOffVM_Task} progress is 0%. 
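Entries like 'Waiting for the task: (returnval){ value = "task-1200282" ... } to complete' followed by periodic 'progress is N%' lines come from oslo.vmware's wait_for_task/_poll_task loop, which keeps re-querying vCenter until the task succeeds or fails. A stdlib-only sketch of that polling pattern; a simplified stand-in, not the actual oslo.vmware implementation:

# Simplified stand-in (not the real oslo.vmware code) for the
# wait_for_task/_poll_task pattern visible in the surrounding log entries.
import time

def wait_for_task(poll, interval=0.5, timeout=300.0):
    """Call poll() until it reports success, printing progress like the log."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll()            # e.g. ("running", 66)
        print(f"Task progress is {progress}%.")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("vCenter task failed")
        time.sleep(interval)                # wait before polling again
    raise TimeoutError("task did not complete before the deadline")

# Example: a fake task that completes on the third poll.
_states = iter([("running", 0), ("running", 66), ("success", 100)])
wait_for_task(lambda: next(_states), interval=0.0)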
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.379759] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.380087] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.380376] env[62204]: INFO nova.compute.manager [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Attaching volume e661c092-1fad-4e6e-8508-fde31280e224 to /dev/sdc [ 1001.406617] env[62204]: DEBUG oslo_vmware.api [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200279, 'name': PowerOnVM_Task, 'duration_secs': 1.328203} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.407019] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.407328] env[62204]: INFO nova.compute.manager [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Took 9.48 seconds to spawn the instance on the hypervisor. [ 1001.407646] env[62204]: DEBUG nova.compute.manager [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1001.408676] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27bea82-3270-4154-95f9-15b45d7de23e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.420225] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Applying migration context for instance 2178b629-4be6-473b-9a75-19efa234d442 as it has an incoming, in-progress migration aa2b8ed9-0c99-4ff5-adda-44f43b2f020e. 
Migration status is confirming {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1001.420554] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Applying migration context for instance 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc as it has an incoming, in-progress migration d79ae017-636d-4eab-a832-79fdb917977d. Migration status is migrating {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1001.422714] env[62204]: INFO nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating resource usage from migration aa2b8ed9-0c99-4ff5-adda-44f43b2f020e [ 1001.423105] env[62204]: INFO nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating resource usage from migration d79ae017-636d-4eab-a832-79fdb917977d [ 1001.432780] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0805e1-78b1-4239-8c25-65f518db308e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.441583] env[62204]: DEBUG oslo_vmware.api [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200280, 'name': PowerOnVM_Task, 'duration_secs': 0.628289} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.443795] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Resumed the VM {{(pid=62204) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1001.444052] env[62204]: DEBUG nova.compute.manager [None req-7401e7d4-6491-45b0-99d5-6f9a4f952454 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1001.445581] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded4dbbc-c24e-4a7e-9425-3cd86aea3c2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.449333] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e23c525-0b44-4035-a6c6-173305147d81 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.455998] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance f5f0c15f-ae0d-4615-93ab-3203a5d7e090 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.456750] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 6dc170a4-b08e-44b5-a152-832670e6866b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.456965] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 98805916-8501-4afb-9e1c-a5393f6e5557 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.457139] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 4dc4546f-85e6-4259-9ccd-a7396669eace actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.457303] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance d97d792d-614f-42e3-8516-6c0a7cf15ad5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.457451] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 274285e5-fc23-48b4-b0d6-5a67bc764d78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.457617] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 21056adb-d81e-45bd-b354-1bcb488d2ed9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.457787] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance a93880fc-e517-4d83-98c1-9ce2405bf9d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.457944] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance c0990e53-70c9-4536-b26a-bc00bd457c56 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.458134] env[62204]: WARNING nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance e42444b3-51c9-4d0f-9eee-c6f2e6631997 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1001.458294] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Migration aa2b8ed9-0c99-4ff5-adda-44f43b2f020e is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1001.460043] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 2178b629-4be6-473b-9a75-19efa234d442 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.460043] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 3258243e-a9df-4b3e-a6bd-17e3b2168efe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.460043] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance ade509d8-5d7c-4926-bb2f-067dce84f76c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.460043] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 1c52b662-e436-4e0c-a77b-0f2fc1041a7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.460043] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Migration d79ae017-636d-4eab-a832-79fdb917977d is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1001.460043] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1001.460043] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1001.460043] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3712MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1001.463496] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d30699b-d59a-40b1-8767-11c799763f61 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.377s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.468060] env[62204]: DEBUG nova.network.neutron [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance_info_cache with network_info: [{"id": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "address": "fa:16:3e:27:10:6c", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba57ac2e-ab", "ovs_interfaceid": "ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.476983] env[62204]: DEBUG nova.virt.block_device [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Updating existing volume attachment record: 5e513e10-09d5-41df-a356-608f442b48e8 {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1001.754506] env[62204]: DEBUG oslo_vmware.api [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200282, 'name': PowerOffVM_Task, 'duration_secs': 0.404315} completed successfully. 
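The 'Final resource view' figures above (used_ram=3712MB, used_vcpus=16, used_disk=16GB) can be reproduced from the per-instance and per-migration allocations listed by the resource tracker in the preceding entries, provided the 512 MB reserved in the provider inventory is counted as used memory. A quick back-of-envelope check; a sketch using the numbers from the log, not Nova code:

# Sketch: sums the allocations listed by the resource tracker above and adds
# the 512 MB reserved from the inventory, reproducing the final resource view.
allocs = (
    # (MEMORY_MB, VCPU, DISK_GB) per actively managed instance or migration
    [(192, 1, 1)] * 9      # nine instances with 192 MB allocations
    + [(256, 1, 1)] * 2    # 2178b629-... and 57e14d47-... (256 MB each)
    + [(192, 1, 1)] * 3    # 3258243e-..., ade509d8-..., 1c52b662-...
    + [(192, 1, 1)] * 2    # migrations aa2b8ed9-... and d79ae017-...
)
reserved_mb = 512          # MEMORY_MB 'reserved' from the inventory data
used_ram = sum(m for m, _, _ in allocs) + reserved_mb
used_vcpus = sum(v for _, v, _ in allocs)
used_disk = sum(d for _, _, d in allocs)
print(used_ram, used_vcpus, used_disk)   # -> 3712 16 16, matching the log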
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.757499] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.757730] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.758453] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-565e5246-e0de-47ca-a0a0-345d65930b33 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.765181] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200281, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.790148] env[62204]: DEBUG nova.network.neutron [req-4ec83db7-cbab-4940-aed3-459d446b611f req-e0e6e4cc-84f1-422d-890b-80cf074a9ab1 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updated VIF entry in instance network info cache for port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1001.790535] env[62204]: DEBUG nova.network.neutron [req-4ec83db7-cbab-4940-aed3-459d446b611f req-e0e6e4cc-84f1-422d-890b-80cf074a9ab1 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating instance_info_cache with network_info: [{"id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "address": "fa:16:3e:52:85:36", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c7be21-51", "ovs_interfaceid": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.797738] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-e4e06d45-8c6a-425c-aada-bcbd819f8d12 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.807463] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10ba192-1aca-47d3-8aec-c12347f1f5ce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.837804] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a73f01c-7d21-4a01-9c97-9a17d1922f1b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.846020] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eaa9841-73ac-48e1-b7d7-be214400ca2c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.858566] env[62204]: DEBUG nova.compute.provider_tree [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.883804] env[62204]: DEBUG oslo_concurrency.lockutils [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "4dc4546f-85e6-4259-9ccd-a7396669eace" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.883995] env[62204]: DEBUG oslo_concurrency.lockutils [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.884243] env[62204]: DEBUG oslo_concurrency.lockutils [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "4dc4546f-85e6-4259-9ccd-a7396669eace-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.884427] env[62204]: DEBUG oslo_concurrency.lockutils [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.884593] env[62204]: DEBUG oslo_concurrency.lockutils [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.888303] env[62204]: INFO nova.compute.manager [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Terminating instance [ 1001.888718] env[62204]: DEBUG nova.compute.manager [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1001.888755] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1001.889721] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711ae5f9-1d75-47b0-8fcf-a3814b9427ec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.896900] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1001.897150] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f8cfd48-9774-4c55-adc8-eb52f9d0f47a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.903171] env[62204]: DEBUG oslo_vmware.api [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1001.903171] env[62204]: value = "task-1200285" [ 1001.903171] env[62204]: _type = "Task" [ 1001.903171] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.912447] env[62204]: DEBUG oslo_vmware.api [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200285, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.922032] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.922355] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.922567] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleting the datastore file [datastore1] 274285e5-fc23-48b4-b0d6-5a67bc764d78 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.922844] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c447ef2-0cd1-48b9-969f-80159d6d04d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.932936] env[62204]: DEBUG oslo_vmware.api [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for the task: (returnval){ [ 1001.932936] env[62204]: value = "task-1200286" [ 1001.932936] env[62204]: _type = "Task" [ 1001.932936] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.940456] env[62204]: INFO nova.compute.manager [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Took 21.39 seconds to build instance. [ 1001.944684] env[62204]: DEBUG oslo_vmware.api [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200286, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.973624] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-2178b629-4be6-473b-9a75-19efa234d442" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.974008] env[62204]: DEBUG nova.objects.instance [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lazy-loading 'migration_context' on Instance uuid 2178b629-4be6-473b-9a75-19efa234d442 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.263801] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200281, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.134905} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.264579] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 1c52b662-e436-4e0c-a77b-0f2fc1041a7d/1c52b662-e436-4e0c-a77b-0f2fc1041a7d.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1002.264833] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1002.265647] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82a0d60-a0a7-4d33-bb39-d57b3eb1cbce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.268470] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ed50a82-a0d4-4633-b86a-c8863dc772cf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.288934] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance '57e14d47-1d3f-4fed-93c1-11cfc17dc9bc' progress to 0 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1002.294672] env[62204]: DEBUG oslo_concurrency.lockutils [req-4ec83db7-cbab-4940-aed3-459d446b611f req-e0e6e4cc-84f1-422d-890b-80cf074a9ab1 service nova] Releasing lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.295200] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 
tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1002.295200] env[62204]: value = "task-1200287" [ 1002.295200] env[62204]: _type = "Task" [ 1002.295200] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.303766] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200287, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.361710] env[62204]: DEBUG nova.scheduler.client.report [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1002.413827] env[62204]: DEBUG oslo_vmware.api [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200285, 'name': PowerOffVM_Task, 'duration_secs': 0.38774} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.414129] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1002.414298] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1002.414558] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e10e7d4-8a8d-4524-a9bc-32d0f670f3f8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.441041] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b37067de-40a4-4f2d-b682-d202994505a6 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "ade509d8-5d7c-4926-bb2f-067dce84f76c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.898s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.445524] env[62204]: DEBUG oslo_vmware.api [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Task: {'id': task-1200286, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.48727} 
completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.445833] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.446086] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1002.446331] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1002.446576] env[62204]: INFO nova.compute.manager [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1002.446879] env[62204]: DEBUG oslo.service.loopingcall [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
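The interleaved teardown records for instances 274285e5 and 4dc4546f above follow one order: PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task on the instance directory, then network deallocation back in the compute manager. The outline below is a simplified, hypothetical condensation of that sequence, not the actual nova.virt.vmwareapi.vmops code; session is an oslo_vmware session and the morefs are assumed to be looked up elsewhere.

    # Simplified, hypothetical condensation of the teardown order shown in the
    # log (not Nova's vmops implementation). vm_ref/dc_ref are morefs assumed
    # to have been resolved earlier; ds_path is the instance directory.
    def destroy_instance(session, vm_ref, dc_ref, ds_path):
        vim = session.vim
        # 1. Power off the VM (PowerOffVM_Task).
        session.wait_for_task(
            session.invoke_api(vim, 'PowerOffVM_Task', vm_ref))
        # 2. Unregister the VM from the vCenter inventory (synchronous call).
        session.invoke_api(vim, 'UnregisterVM', vm_ref)
        # 3. Delete the instance directory from the datastore.
        session.wait_for_task(
            session.invoke_api(vim, 'DeleteDatastoreFile_Task',
                               vim.service_content.fileManager,
                               name=ds_path, datacenter=dc_ref))
        # 4. Network deallocation happens afterwards in the compute manager
        #    (the _deallocate_network_with_retries wait seen above).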
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.447137] env[62204]: DEBUG nova.compute.manager [-] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1002.447266] env[62204]: DEBUG nova.network.neutron [-] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1002.479325] env[62204]: DEBUG nova.objects.base [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Object Instance<2178b629-4be6-473b-9a75-19efa234d442> lazy-loaded attributes: info_cache,migration_context {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1002.480485] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b133c25-987b-4c78-b219-2e9eb0f54ce7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.500615] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8aee1c8-2066-4e5a-8439-e0684753aa2f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.505717] env[62204]: DEBUG oslo_vmware.api [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1002.505717] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523c58d0-33ab-34a3-6f5e-74b3ed64d9ea" [ 1002.505717] env[62204]: _type = "Task" [ 1002.505717] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.514485] env[62204]: DEBUG oslo_vmware.api [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523c58d0-33ab-34a3-6f5e-74b3ed64d9ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.520745] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1002.521009] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1002.521221] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleting the datastore file [datastore1] 4dc4546f-85e6-4259-9ccd-a7396669eace {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.521491] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e189f53-26fd-4637-8ffe-ab3d29ca054a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.527283] env[62204]: DEBUG oslo_vmware.api [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1002.527283] env[62204]: value = "task-1200289" [ 1002.527283] env[62204]: _type = "Task" [ 1002.527283] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.534755] env[62204]: DEBUG oslo_vmware.api [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200289, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.798588] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1002.798984] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d137aed4-9c87-4c34-aac1-7d5340f173ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.809558] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200287, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.241937} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.810830] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1002.811226] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1002.811226] env[62204]: value = "task-1200290" [ 1002.811226] env[62204]: _type = "Task" [ 1002.811226] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.811889] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585327ab-888b-446d-8969-39149eea1a4b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.823847] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200290, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.846182] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 1c52b662-e436-4e0c-a77b-0f2fc1041a7d/1c52b662-e436-4e0c-a77b-0f2fc1041a7d.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.846542] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06c356b0-db3a-4e5e-b16a-3842fc379154 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.868269] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1002.868484] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.474s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.868806] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1002.868806] env[62204]: value = "task-1200291" [ 1002.868806] env[62204]: _type = "Task" [ 1002.868806] env[62204]: } to complete. 
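For instance 1c52b662 the log shows the image-to-root-disk path during spawn: the cached image VMDK is copied (CopyVirtualDisk_Task, task-1200281), the copy is grown to the flavor's root size (ExtendVirtualDisk_Task, task-1200287, "Extending root virtual disk to 1048576" KB), and the disk is then attached by reconfiguring the VM (task-1200291). Below is a hedged sketch of the extend step using the VirtualDiskManager as in the log; the session, dc_ref and path are assumed inputs.

    # Hedged sketch of the "Extending root virtual disk" step (task-1200287):
    # VirtualDiskManager.ExtendVirtualDisk_Task grows the copied VMDK to the
    # flavor root size (1048576 KB = 1 GiB here). dc_ref is a Datacenter moref
    # assumed to be available from an earlier lookup.
    def extend_root_disk(session, dc_ref, vmdk_path, new_size_kb=1048576):
        vim = session.vim
        task = session.invoke_api(
            vim, 'ExtendVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            name=vmdk_path,            # e.g. '[datastore1] <uuid>/<uuid>.vmdk'
            datacenter=dc_ref,
            newCapacityKb=new_size_kb,
            eagerZero=False)
        session.wait_for_task(task)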
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.869625] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.244s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.869847] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.881387] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200291, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.893550] env[62204]: INFO nova.scheduler.client.report [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Deleted allocations for instance e42444b3-51c9-4d0f-9eee-c6f2e6631997 [ 1002.997941] env[62204]: DEBUG nova.compute.manager [req-f292e776-8b6b-4edb-a470-73b2a3944663 req-e989ff18-55ca-4b06-b961-d0b6bfab1505 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Received event network-vif-deleted-55c5fd12-e601-44a8-ab4f-2fb4f263333e {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1002.998212] env[62204]: INFO nova.compute.manager [req-f292e776-8b6b-4edb-a470-73b2a3944663 req-e989ff18-55ca-4b06-b961-d0b6bfab1505 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Neutron deleted interface 55c5fd12-e601-44a8-ab4f-2fb4f263333e; detaching it from the instance and deleting it from the info cache [ 1002.998399] env[62204]: DEBUG nova.network.neutron [req-f292e776-8b6b-4edb-a470-73b2a3944663 req-e989ff18-55ca-4b06-b961-d0b6bfab1505 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.016451] env[62204]: DEBUG oslo_vmware.api [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]523c58d0-33ab-34a3-6f5e-74b3ed64d9ea, 'name': SearchDatastore_Task, 'duration_secs': 0.010619} completed successfully. 
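The 'Lock "compute_resources" acquired ... waited 7.244s' / 'held 0.001s' pairs above are emitted by oslo.concurrency's lock wrapper around the resource tracker's critical sections. A minimal illustration of the same primitive follows; the lock name matches the log, but the decorated function is a stand-in, not the real ResourceTracker method.

    # Minimal illustration of the oslo.concurrency lock behind the
    # 'Lock "compute_resources" acquired/released' lines. The decorated
    # function is a stand-in for the ResourceTracker methods in the log.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_usage():
        # Everything in here runs under the named semaphore; the DEBUG
        # "waited X s" / "held Y s" lines are logged by lockutils itself.
        pass

    update_usage()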
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.016792] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.017057] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.039812] env[62204]: DEBUG oslo_vmware.api [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.466311} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.040552] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1003.040552] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1003.040680] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1003.040889] env[62204]: INFO nova.compute.manager [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1003.041185] env[62204]: DEBUG oslo.service.loopingcall [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
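The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" records come from oslo.service's looping-call machinery, which keeps retrying the network deallocation until it reports completion. The example below only illustrates the generic start()/wait() pattern with a dummy callback; FixedIntervalLoopingCall is used for simplicity and is not claimed to be the exact looping-call variant Nova uses here.

    # Hedged illustration of the oslo.service looping-call start()/wait()
    # pattern behind the "Waiting for function ... to return" lines.
    # The callback is a dummy that "succeeds" on the third attempt.
    from oslo_service import loopingcall

    attempts = {'n': 0}

    def deallocate():
        attempts['n'] += 1
        if attempts['n'] < 3:
            return  # not done yet; loop again on the next interval
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(deallocate)
    result = timer.start(interval=0.1).wait()   # blocks until LoopingCallDone
    print(result)                               # True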
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1003.041426] env[62204]: DEBUG nova.compute.manager [-] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1003.041542] env[62204]: DEBUG nova.network.neutron [-] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1003.063113] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "032bbedb-7663-45a3-b2d0-37570d38f573" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.063360] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "032bbedb-7663-45a3-b2d0-37570d38f573" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.327311] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200290, 'name': PowerOffVM_Task, 'duration_secs': 0.282027} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.327660] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1003.327919] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance '57e14d47-1d3f-4fed-93c1-11cfc17dc9bc' progress to 17 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1003.382954] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200291, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.399347] env[62204]: DEBUG nova.network.neutron [-] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.404863] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5ea00d58-6314-4e7b-950c-a7ba0026fd95 tempest-ImagesTestJSON-549754660 tempest-ImagesTestJSON-549754660-project-member] Lock "e42444b3-51c9-4d0f-9eee-c6f2e6631997" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.789s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.501652] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c8d4a43-88ac-4c2f-b89e-0a51b518ef80 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.511602] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbc44bb-3b4b-466a-a603-6da003510978 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.550266] env[62204]: DEBUG nova.compute.manager [req-f292e776-8b6b-4edb-a470-73b2a3944663 req-e989ff18-55ca-4b06-b961-d0b6bfab1505 service nova] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Detach interface failed, port_id=55c5fd12-e601-44a8-ab4f-2fb4f263333e, reason: Instance 274285e5-fc23-48b4-b0d6-5a67bc764d78 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1003.565297] env[62204]: DEBUG nova.compute.manager [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1003.829243] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36800d2-3f63-4838-a91a-05d359faa53f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.835245] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1003.835623] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1003.835867] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1003.836141] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1003.839286] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1003.839286] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1003.839286] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1003.839286] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1003.839286] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1003.839286] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1003.839286] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1003.844522] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1acb0826-f137-42e3-8e7f-9c67e407ced2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.862730] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3717d599-f0c6-4a76-a41c-400056b177f4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.867698] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1003.867698] env[62204]: value = "task-1200293" [ 1003.867698] env[62204]: _type = "Task" [ 1003.867698] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.903274] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e7daec-4135-473d-bcda-60b92a6e963e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.907949] env[62204]: INFO nova.compute.manager [-] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Took 1.46 seconds to deallocate network for instance. [ 1003.908314] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200293, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.913775] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200291, 'name': ReconfigVM_Task, 'duration_secs': 0.785084} completed successfully. 
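The hardware.py lines above reduce the flavor/image limits (65536 sockets, cores and threads) and the 1-vCPU count to a single candidate, VirtCPUTopology(cores=1,sockets=1,threads=1). The toy function below reproduces that enumeration arithmetic in isolation; it is an illustration, not Nova's implementation.

    # Toy reproduction of the "Build topologies for 1 vcpu(s)" arithmetic:
    # enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count, capped by the flavor/image limits (65536 each here).
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append((s, c, t))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)] -> "Got 1 possible topologies"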
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.919217] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 1c52b662-e436-4e0c-a77b-0f2fc1041a7d/1c52b662-e436-4e0c-a77b-0f2fc1041a7d.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.920486] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72922a94-151e-4d49-870b-dedc20abac3a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.922736] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205a4201-5809-47b4-82e5-0965842ad096 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.940891] env[62204]: DEBUG nova.compute.provider_tree [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1003.942916] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1003.942916] env[62204]: value = "task-1200294" [ 1003.942916] env[62204]: _type = "Task" [ 1003.942916] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.951722] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200294, 'name': Rename_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.094199] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.191542] env[62204]: DEBUG nova.network.neutron [-] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.376884] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200293, 'name': ReconfigVM_Task, 'duration_secs': 0.150152} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.377372] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance '57e14d47-1d3f-4fed-93c1-11cfc17dc9bc' progress to 33 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1004.421872] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1004.458157] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200294, 'name': Rename_Task, 'duration_secs': 0.184095} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.458468] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1004.458724] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91225fc0-a2f9-4b60-b02c-5c8520349539 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.467772] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1004.467772] env[62204]: value = "task-1200295" [ 1004.467772] env[62204]: _type = "Task" [ 1004.467772] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.468698] env[62204]: ERROR nova.scheduler.client.report [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [req-bb5b901b-5a06-4f4b-93a2-a340df40b926] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 92e8f362-5134-40c6-9a5c-0b8f64197972. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bb5b901b-5a06-4f4b-93a2-a340df40b926"}]} [ 1004.476519] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.487394] env[62204]: DEBUG nova.scheduler.client.report [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Refreshing inventories for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1004.501979] env[62204]: DEBUG nova.scheduler.client.report [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Updating ProviderTree inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1004.502297] env[62204]: DEBUG nova.compute.provider_tree [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 155, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1004.519237] env[62204]: DEBUG nova.scheduler.client.report [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 
tempest-ServerDiskConfigTestJSON-775740427-project-member] Refreshing aggregate associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, aggregates: None {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1004.539550] env[62204]: DEBUG nova.scheduler.client.report [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Refreshing trait associations for resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK {{(pid=62204) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1004.695424] env[62204]: INFO nova.compute.manager [-] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Took 1.65 seconds to deallocate network for instance. [ 1004.798393] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e2b05a-1f01-4eed-a96e-fe3c8667d472 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.806189] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d41848a9-d69d-457f-9959-ede4a6be1543 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.840393] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53b7f8f-9ded-4bcd-8d76-89b58d353a32 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.848588] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af146ae-8646-499e-8d99-1bb82e105e8b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.862427] env[62204]: DEBUG nova.compute.provider_tree [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1004.887699] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1004.888365] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1004.888365] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.888365] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1004.888504] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.888595] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1004.888788] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1004.888956] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1004.889185] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1004.889363] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1004.889541] env[62204]: DEBUG nova.virt.hardware [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted 
desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1004.894982] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Reconfiguring VM instance instance-00000042 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1004.895904] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27eaae42-558d-47ee-8a4f-cff319c9b658 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.915722] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1004.915722] env[62204]: value = "task-1200296" [ 1004.915722] env[62204]: _type = "Task" [ 1004.915722] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.922538] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200296, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.974904] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200295, 'name': PowerOnVM_Task} progress is 79%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.203394] env[62204]: DEBUG oslo_concurrency.lockutils [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.251338] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1005.251652] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1005.282188] env[62204]: DEBUG nova.compute.manager [req-61558ea6-b23b-4141-9ed2-f4ec1876340f req-9514081c-eeeb-420e-97aa-7e7bc7d2cdbe service nova] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Received event network-vif-deleted-13d59ea2-8671-4e65-a3f0-5839b8e92325 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1005.402649] env[62204]: DEBUG nova.scheduler.client.report [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Updated inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with generation 129 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1005.402967] env[62204]: DEBUG nova.compute.provider_tree [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Updating resource provider 92e8f362-5134-40c6-9a5c-0b8f64197972 generation from 129 to 130 during operation: update_inventory {{(pid=62204) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1005.403722] env[62204]: DEBUG nova.compute.provider_tree [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Updating inventory in ProviderTree for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1005.427348] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200296, 'name': ReconfigVM_Task, 'duration_secs': 0.23484} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.427682] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Reconfigured VM instance instance-00000042 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1005.428609] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2cbed9f-95cb-4980-9467-4795c858f8fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.453439] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc/57e14d47-1d3f-4fed-93c1-11cfc17dc9bc.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.453439] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b72a7cdd-ff67-4e69-b925-f791faa782ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.475736] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200295, 'name': PowerOnVM_Task} progress is 79%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.477197] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1005.477197] env[62204]: value = "task-1200297" [ 1005.477197] env[62204]: _type = "Task" [ 1005.477197] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.486704] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200297, 'name': ReconfigVM_Task} progress is 6%. 
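The 409 from Placement above (code "placement.concurrent_update", req-bb5b901b-5a06-4f4b-93a2-a340df40b926) is the provider-generation check doing its job: the report client's cached generation was stale, so it re-reads the provider inventory (max_unit 155), reapplies its DISK_GB change, and the retried update lands, bumping the generation from 129 to 130. Below is a hedged sketch of that read-modify-write retry against the Placement REST API; the endpoint, token and retry count are placeholders.

    # Hedged sketch of the Placement read-modify-write retry visible above:
    # a PUT carrying a stale resource_provider_generation gets a 409 with
    # code "placement.concurrent_update"; re-read, reapply, and retry.
    # Endpoint and token are placeholders.
    import requests

    PLACEMENT = 'http://placement.example.test/resource_providers'
    HEADERS = {'X-Auth-Token': 'TOKEN',
               'OpenStack-API-Version': 'placement 1.26'}
    RP_UUID = '92e8f362-5134-40c6-9a5c-0b8f64197972'

    def set_inventory(new_inventories, retries=3):
        url = f'{PLACEMENT}/{RP_UUID}/inventories'
        for _ in range(retries):
            current = requests.get(url, headers=HEADERS).json()
            body = {
                # Echo back the generation we just read; Placement rejects the
                # PUT with 409 placement.concurrent_update if it has moved.
                'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': new_inventories,
            }
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
        raise RuntimeError('generation conflict persisted after retries')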
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.755690] env[62204]: INFO nova.compute.manager [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Detaching volume 6debc6c9-3775-46fa-b3ae-21b56913f95b [ 1005.794320] env[62204]: INFO nova.virt.block_device [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Attempting to driver detach volume 6debc6c9-3775-46fa-b3ae-21b56913f95b from mountpoint /dev/sdb [ 1005.794571] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Volume detach. Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1005.794805] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260108', 'volume_id': '6debc6c9-3775-46fa-b3ae-21b56913f95b', 'name': 'volume-6debc6c9-3775-46fa-b3ae-21b56913f95b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd97d792d-614f-42e3-8516-6c0a7cf15ad5', 'attached_at': '', 'detached_at': '', 'volume_id': '6debc6c9-3775-46fa-b3ae-21b56913f95b', 'serial': '6debc6c9-3775-46fa-b3ae-21b56913f95b'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1005.795692] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71b87ff-7797-4893-9cac-d030708c06b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.830756] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23de0ef-655a-432c-b593-fd87fee40c31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.840538] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6667d50c-4939-4835-8bf7-eea845196609 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.860602] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c148474-d69d-4316-92a7-7f4a0a713dee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.875763] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] The volume has not been displaced from its original location: [datastore2] volume-6debc6c9-3775-46fa-b3ae-21b56913f95b/volume-6debc6c9-3775-46fa-b3ae-21b56913f95b.vmdk. No consolidation needed. 
{{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1005.881119] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfiguring VM instance instance-00000055 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1005.881418] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-376dbaa2-2160-456d-b4f3-4e146a3ba24f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.898607] env[62204]: DEBUG oslo_vmware.api [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 1005.898607] env[62204]: value = "task-1200298" [ 1005.898607] env[62204]: _type = "Task" [ 1005.898607] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.906228] env[62204]: DEBUG oslo_vmware.api [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200298, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.976483] env[62204]: DEBUG oslo_vmware.api [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200295, 'name': PowerOnVM_Task, 'duration_secs': 1.2424} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.976704] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.977026] env[62204]: INFO nova.compute.manager [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Took 11.64 seconds to spawn the instance on the hypervisor. 
[ 1005.977223] env[62204]: DEBUG nova.compute.manager [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1005.978044] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debc9e2e-f04c-45fd-b833-d719298a1b85 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.991548] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200297, 'name': ReconfigVM_Task, 'duration_secs': 0.405343} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.991994] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc/57e14d47-1d3f-4fed-93c1-11cfc17dc9bc.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.992281] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance '57e14d47-1d3f-4fed-93c1-11cfc17dc9bc' progress to 50 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1006.049342] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Volume attach. 
Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1006.049609] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260129', 'volume_id': 'e661c092-1fad-4e6e-8508-fde31280e224', 'name': 'volume-e661c092-1fad-4e6e-8508-fde31280e224', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21056adb-d81e-45bd-b354-1bcb488d2ed9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e661c092-1fad-4e6e-8508-fde31280e224', 'serial': 'e661c092-1fad-4e6e-8508-fde31280e224'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1006.050519] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab83d7eb-ea48-44e3-a690-1018b779c8e2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.070704] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78487325-7003-409d-8de4-ee7c364a504e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.099650] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] volume-e661c092-1fad-4e6e-8508-fde31280e224/volume-e661c092-1fad-4e6e-8508-fde31280e224.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1006.100316] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-011341ef-2759-4ee9-90d0-32dd6c4eebf9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.119124] env[62204]: DEBUG oslo_vmware.api [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 1006.119124] env[62204]: value = "task-1200299" [ 1006.119124] env[62204]: _type = "Task" [ 1006.119124] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.127712] env[62204]: DEBUG oslo_vmware.api [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200299, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.409119] env[62204]: DEBUG oslo_vmware.api [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200298, 'name': ReconfigVM_Task, 'duration_secs': 0.326631} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.409457] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Reconfigured VM instance instance-00000055 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1006.414927] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.398s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.417616] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82eb16a7-7d04-462e-b167-20c15c332bcf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.428040] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.334s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.429510] env[62204]: INFO nova.compute.claims [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1006.437147] env[62204]: DEBUG oslo_vmware.api [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 1006.437147] env[62204]: value = "task-1200300" [ 1006.437147] env[62204]: _type = "Task" [ 1006.437147] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.445438] env[62204]: DEBUG oslo_vmware.api [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200300, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.500843] env[62204]: INFO nova.compute.manager [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Took 21.38 seconds to build instance. 
[ 1006.502497] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468da786-addc-4e58-a614-39ea4d189c34 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.523095] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46fde73-21c5-4aa9-af05-d5e1a955f90d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.542280] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance '57e14d47-1d3f-4fed-93c1-11cfc17dc9bc' progress to 67 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1006.635823] env[62204]: DEBUG oslo_vmware.api [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200299, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.947615] env[62204]: DEBUG oslo_vmware.api [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200300, 'name': ReconfigVM_Task, 'duration_secs': 0.181921} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.947844] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260108', 'volume_id': '6debc6c9-3775-46fa-b3ae-21b56913f95b', 'name': 'volume-6debc6c9-3775-46fa-b3ae-21b56913f95b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd97d792d-614f-42e3-8516-6c0a7cf15ad5', 'attached_at': '', 'detached_at': '', 'volume_id': '6debc6c9-3775-46fa-b3ae-21b56913f95b', 'serial': '6debc6c9-3775-46fa-b3ae-21b56913f95b'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1006.990205] env[62204]: INFO nova.scheduler.client.report [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted allocation for migration aa2b8ed9-0c99-4ff5-adda-44f43b2f020e [ 1007.006815] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7d0b8e6c-7800-400a-a50f-7ea86c331e85 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.896s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.085128] env[62204]: DEBUG nova.network.neutron [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] 
[instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Port 4e81e820-357d-4b7e-900f-aaac4c7c2798 binding to destination host cpu-1 is already ACTIVE {{(pid=62204) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1007.129914] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.130249] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.130467] env[62204]: DEBUG nova.compute.manager [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1007.131308] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837706ed-f9c5-4fae-a6fd-acd1a2131be2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.137356] env[62204]: DEBUG oslo_vmware.api [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200299, 'name': ReconfigVM_Task, 'duration_secs': 0.758863} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.138022] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfigured VM instance instance-0000005d to attach disk [datastore1] volume-e661c092-1fad-4e6e-8508-fde31280e224/volume-e661c092-1fad-4e6e-8508-fde31280e224.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1007.144486] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89626cfa-11de-46a7-b164-fae4705bd2ea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.154986] env[62204]: DEBUG nova.compute.manager [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62204) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1007.155628] env[62204]: DEBUG nova.objects.instance [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lazy-loading 'flavor' on Instance uuid 1c52b662-e436-4e0c-a77b-0f2fc1041a7d {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.162258] env[62204]: DEBUG oslo_vmware.api [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 1007.162258] env[62204]: value = "task-1200301" [ 1007.162258] env[62204]: _type = "Task" [ 1007.162258] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.170561] env[62204]: DEBUG oslo_vmware.api [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200301, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.307667] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "c0990e53-70c9-4536-b26a-bc00bd457c56" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.308049] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.308273] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.308461] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.308637] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.310836] env[62204]: INFO nova.compute.manager [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Terminating instance [ 1007.312635] env[62204]: DEBUG nova.compute.manager [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1007.312820] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.313669] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9878994-217d-474a-9645-551004ccb7a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.321292] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.321528] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c80161a8-8565-4c10-9abf-c001cdc11850 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.327803] env[62204]: DEBUG oslo_vmware.api [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 1007.327803] env[62204]: value = "task-1200302" [ 1007.327803] env[62204]: _type = "Task" [ 1007.327803] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.336332] env[62204]: DEBUG oslo_vmware.api [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200302, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.498956] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0328836e-c6cc-40f3-b937-06fabec04c59 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.425s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.504998] env[62204]: DEBUG nova.objects.instance [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lazy-loading 'flavor' on Instance uuid d97d792d-614f-42e3-8516-6c0a7cf15ad5 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.661578] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2732ab1-2733-4bf6-a75b-2359581de7e2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.666371] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.666664] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2919b018-fbd3-4c46-91d2-f6ba5bf520b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.679666] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3ec907-8676-4ad8-9806-a5aba1d66bea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.682418] env[62204]: DEBUG oslo_vmware.api [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1007.682418] env[62204]: value = "task-1200303" [ 1007.682418] env[62204]: _type = "Task" [ 1007.682418] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.682975] env[62204]: DEBUG oslo_vmware.api [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200301, 'name': ReconfigVM_Task, 'duration_secs': 0.203504} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.683414] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260129', 'volume_id': 'e661c092-1fad-4e6e-8508-fde31280e224', 'name': 'volume-e661c092-1fad-4e6e-8508-fde31280e224', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21056adb-d81e-45bd-b354-1bcb488d2ed9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e661c092-1fad-4e6e-8508-fde31280e224', 'serial': 'e661c092-1fad-4e6e-8508-fde31280e224'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1007.715918] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b80d3f-83a6-4671-a605-3b596e3b23ff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.722017] env[62204]: DEBUG oslo_vmware.api [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200303, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.727124] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dda00a9-1a2f-4e90-9bb5-19a582a3850d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.741775] env[62204]: DEBUG nova.compute.provider_tree [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.838132] env[62204]: DEBUG oslo_vmware.api [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200302, 'name': PowerOffVM_Task, 'duration_secs': 0.201908} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.838354] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.838530] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.838797] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3c6e3cc-3115-4752-830e-0c8c89ee34cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.908750] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1007.909056] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1007.909279] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleting the datastore file [datastore1] c0990e53-70c9-4536-b26a-bc00bd457c56 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.909552] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3182c7b4-e22c-4bfd-aea4-782a9894034a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.916305] env[62204]: DEBUG oslo_vmware.api [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for the task: (returnval){ [ 1007.916305] env[62204]: value = "task-1200305" [ 1007.916305] env[62204]: _type = "Task" [ 1007.916305] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.926114] env[62204]: DEBUG oslo_vmware.api [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200305, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.112442] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.112591] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.112648] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.195305] env[62204]: DEBUG oslo_vmware.api [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200303, 'name': PowerOffVM_Task, 'duration_secs': 0.195602} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.195591] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.195825] env[62204]: DEBUG nova.compute.manager [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1008.196672] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ceb60f-bf52-4544-82d2-5d6761f6f437 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.246324] env[62204]: DEBUG nova.scheduler.client.report [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1008.353751] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "2178b629-4be6-473b-9a75-19efa234d442" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.354073] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.354320] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "2178b629-4be6-473b-9a75-19efa234d442-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.354516] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.354714] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.357126] env[62204]: INFO nova.compute.manager [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Terminating instance [ 1008.358932] env[62204]: DEBUG nova.compute.manager [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1008.359157] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.359992] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc59b23-f4b5-44a3-88bf-c4ce01eadc6d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.367534] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.367798] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce20de03-704f-4316-9ae2-b1e50a5d6e1a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.373592] env[62204]: DEBUG oslo_vmware.api [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1008.373592] env[62204]: value = "task-1200306" [ 1008.373592] env[62204]: _type = "Task" [ 1008.373592] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.381787] env[62204]: DEBUG oslo_vmware.api [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200306, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.428139] env[62204]: DEBUG oslo_vmware.api [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Task: {'id': task-1200305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33473} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.428523] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.428789] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.429063] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.429329] env[62204]: INFO nova.compute.manager [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1008.429673] env[62204]: DEBUG oslo.service.loopingcall [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1008.429942] env[62204]: DEBUG nova.compute.manager [-] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1008.430087] env[62204]: DEBUG nova.network.neutron [-] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1008.514078] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55bd1da8-bb59-4b19-9ed5-4855767f55f5 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.262s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.709552] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7ca7ee5a-896c-4bca-a7cb-a1e8d75da490 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.579s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.726397] env[62204]: DEBUG nova.objects.instance [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 21056adb-d81e-45bd-b354-1bcb488d2ed9 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.752509] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.753391] env[62204]: DEBUG nova.compute.manager [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1008.765020] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.340s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.765020] env[62204]: DEBUG nova.objects.instance [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lazy-loading 'resources' on Instance uuid 274285e5-fc23-48b4-b0d6-5a67bc764d78 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1008.814669] env[62204]: DEBUG nova.compute.manager [req-4c89fa66-7e02-4b14-a791-b214b68236e4 req-a9b3edde-2591-4754-8345-d207cb855645 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Received event network-vif-deleted-cb48dbbb-646f-445c-89d1-8c4a9e36de59 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1008.815887] env[62204]: INFO nova.compute.manager [req-4c89fa66-7e02-4b14-a791-b214b68236e4 req-a9b3edde-2591-4754-8345-d207cb855645 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Neutron deleted interface cb48dbbb-646f-445c-89d1-8c4a9e36de59; detaching it from the instance and deleting it from the info cache [ 1008.815887] env[62204]: DEBUG nova.network.neutron [req-4c89fa66-7e02-4b14-a791-b214b68236e4 req-a9b3edde-2591-4754-8345-d207cb855645 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.885916] env[62204]: DEBUG oslo_vmware.api [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200306, 'name': PowerOffVM_Task, 'duration_secs': 0.347358} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.886218] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.886386] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.886652] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2837789-113f-44a0-b54d-85abac6ac88d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.917286] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.917553] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.922019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.922019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.156577] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.156822] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock 
"refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.157033] env[62204]: DEBUG nova.network.neutron [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1009.231473] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86fa8fe0-5bdc-4b87-a70d-1460aedfd7aa tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.851s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.271400] env[62204]: DEBUG nova.compute.utils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1009.273024] env[62204]: DEBUG nova.compute.manager [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1009.273158] env[62204]: DEBUG nova.network.neutron [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1009.283753] env[62204]: DEBUG nova.network.neutron [-] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.297777] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.298380] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.298380] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleting the datastore file [datastore2] 2178b629-4be6-473b-9a75-19efa234d442 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.299100] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96137898-4136-4ee5-9a6f-d0ba1d143dcd {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.305312] env[62204]: DEBUG oslo_vmware.api [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1009.305312] env[62204]: value = "task-1200308" [ 1009.305312] env[62204]: _type = "Task" [ 1009.305312] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.316291] env[62204]: DEBUG oslo_vmware.api [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200308, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.317595] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea3ec55c-eca1-4e01-bcbd-798deb549928 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.328289] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84382e8b-13ce-44a9-bd22-4903ba519974 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.342466] env[62204]: DEBUG nova.policy [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f57a0e000a79440489a0009f1b2390e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cc2d3674b2a4fa3806dc0286481368e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1009.367622] env[62204]: DEBUG nova.compute.manager [req-4c89fa66-7e02-4b14-a791-b214b68236e4 req-a9b3edde-2591-4754-8345-d207cb855645 service nova] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Detach interface failed, port_id=cb48dbbb-646f-445c-89d1-8c4a9e36de59, reason: Instance c0990e53-70c9-4536-b26a-bc00bd457c56 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1009.419842] env[62204]: DEBUG nova.compute.manager [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1009.423391] env[62204]: DEBUG nova.compute.utils [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1009.524203] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfba2c4-f2f2-4eb1-811d-b3bfda0e0a38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.533195] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363410f7-cb73-4699-bd8d-ab44f771b555 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.566337] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.566586] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.566821] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.567035] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.567215] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.569552] env[62204]: INFO nova.compute.manager [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Terminating instance [ 1009.571315] env[62204]: DEBUG 
nova.compute.manager [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1009.571512] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.572467] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e6b07f-390f-4a70-bf75-afa56e3018bf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.575584] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93f5228-117f-48b7-ad18-68ba593762fb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.584542] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d492feb-a4ae-43d0-86a6-3683459ef313 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.588150] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.588383] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea50a557-7bf1-4015-977c-c06bf7a427bc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.599323] env[62204]: DEBUG nova.compute.provider_tree [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.654409] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1009.655408] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1009.655408] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleting the datastore file [datastore1] 1c52b662-e436-4e0c-a77b-0f2fc1041a7d {{(pid=62204) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1009.655408] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4134b8f1-4464-4854-8c00-bac2b95f79c9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.666376] env[62204]: DEBUG oslo_vmware.api [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1009.666376] env[62204]: value = "task-1200310" [ 1009.666376] env[62204]: _type = "Task" [ 1009.666376] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.674479] env[62204]: DEBUG oslo_vmware.api [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.714466] env[62204]: DEBUG nova.network.neutron [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Successfully created port: b6783952-cb00-4f07-907c-d302559ad37d {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1009.777304] env[62204]: DEBUG nova.compute.manager [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1009.786101] env[62204]: INFO nova.compute.manager [-] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Took 1.36 seconds to deallocate network for instance. [ 1009.819583] env[62204]: DEBUG oslo_vmware.api [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200308, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.900970] env[62204]: DEBUG oslo_concurrency.lockutils [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.901247] env[62204]: DEBUG oslo_concurrency.lockutils [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.901460] env[62204]: DEBUG oslo_concurrency.lockutils [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.901646] env[62204]: DEBUG oslo_concurrency.lockutils [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.901815] env[62204]: DEBUG oslo_concurrency.lockutils [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.904453] env[62204]: INFO nova.compute.manager [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Terminating instance [ 1009.906414] env[62204]: DEBUG nova.compute.manager [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1009.906607] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.907451] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d9fc3f-cbcc-4bce-9723-1c95e1e52a0e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.914387] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.914584] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9342dbd-85ae-4889-9f9b-041b704d71c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.922444] env[62204]: DEBUG oslo_vmware.api [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 1009.922444] env[62204]: value = "task-1200311" [ 1009.922444] env[62204]: _type = "Task" [ 1009.922444] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.931302] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.937113] env[62204]: DEBUG oslo_vmware.api [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200311, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.946816] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.101874] env[62204]: DEBUG nova.scheduler.client.report [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1010.134434] env[62204]: DEBUG nova.network.neutron [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [{"id": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "address": "fa:16:3e:d5:25:7f", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e81e820-35", "ovs_interfaceid": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.176811] env[62204]: DEBUG oslo_vmware.api [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.36862} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.177579] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.177811] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.178012] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.178202] env[62204]: INFO nova.compute.manager [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1010.178472] env[62204]: DEBUG oslo.service.loopingcall [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.179797] env[62204]: DEBUG nova.compute.manager [-] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1010.179904] env[62204]: DEBUG nova.network.neutron [-] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1010.295636] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.320222] env[62204]: DEBUG oslo_vmware.api [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.515302} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.320222] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1010.320222] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1010.320222] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1010.320222] env[62204]: INFO nova.compute.manager [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Took 1.96 seconds to destroy the instance on the hypervisor. [ 1010.320222] env[62204]: DEBUG oslo.service.loopingcall [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1010.320222] env[62204]: DEBUG nova.compute.manager [-] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1010.320644] env[62204]: DEBUG nova.network.neutron [-] [instance: 2178b629-4be6-473b-9a75-19efa234d442] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1010.432196] env[62204]: DEBUG oslo_vmware.api [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200311, 'name': PowerOffVM_Task, 'duration_secs': 0.266752} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.432485] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.432796] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.433181] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ef08f24-a342-4dd7-b905-76ffe273f4d6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.506987] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.506987] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.506987] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Deleting the datastore file [datastore1] d97d792d-614f-42e3-8516-6c0a7cf15ad5 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.510139] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d8b57ed-df9f-4b3b-af6f-ad8cf9bc8190 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.513609] env[62204]: DEBUG oslo_vmware.api [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 1010.513609] env[62204]: value = "task-1200313" [ 1010.513609] env[62204]: _type = "Task" [ 1010.513609] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.524570] env[62204]: DEBUG oslo_vmware.api [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200313, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.604440] env[62204]: DEBUG nova.compute.manager [req-3a923b72-b1cd-4729-9fcc-c7303d3fc57e req-59e44633-27ff-4e23-93d7-456ea4135ec1 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Received event network-vif-deleted-ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1010.604676] env[62204]: INFO nova.compute.manager [req-3a923b72-b1cd-4729-9fcc-c7303d3fc57e req-59e44633-27ff-4e23-93d7-456ea4135ec1 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Neutron deleted interface ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c; detaching it from the instance and deleting it from the info cache [ 1010.604929] env[62204]: DEBUG nova.network.neutron [req-3a923b72-b1cd-4729-9fcc-c7303d3fc57e req-59e44633-27ff-4e23-93d7-456ea4135ec1 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.607529] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.846s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1010.612338] env[62204]: DEBUG oslo_concurrency.lockutils [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.407s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.612712] env[62204]: DEBUG nova.objects.instance [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'resources' on Instance uuid 4dc4546f-85e6-4259-9ccd-a7396669eace {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.633976] env[62204]: INFO nova.scheduler.client.report [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Deleted allocations for instance 274285e5-fc23-48b4-b0d6-5a67bc764d78 [ 1010.638070] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.685982] env[62204]: DEBUG oslo_concurrency.lockutils [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1010.686270] env[62204]: DEBUG oslo_concurrency.lockutils [None 
req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1010.787888] env[62204]: DEBUG nova.compute.manager [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1010.814027] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1010.814183] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1010.814254] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1010.814444] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1010.814601] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1010.814793] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1010.815018] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1010.815189] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1010.815359] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1010.815527] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1010.815711] env[62204]: DEBUG nova.virt.hardware [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1010.816596] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927e56b5-acdf-4c74-aede-d39afa6a307c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.824825] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ff3b7e-be8b-460f-b3ad-ddcf30c4e003 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.845131] env[62204]: DEBUG nova.compute.manager [req-a3461069-e589-422a-8de8-14ce9c947a61 req-6e29b69c-8251-4ac8-81ee-479fbc2ec0d0 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Received event network-vif-deleted-2d9bc2f9-f421-48d9-a636-57fac9c47255 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1010.845332] env[62204]: INFO nova.compute.manager [req-a3461069-e589-422a-8de8-14ce9c947a61 req-6e29b69c-8251-4ac8-81ee-479fbc2ec0d0 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Neutron deleted interface 2d9bc2f9-f421-48d9-a636-57fac9c47255; detaching it from the instance and deleting it from the info cache [ 1010.845493] env[62204]: DEBUG nova.network.neutron [req-a3461069-e589-422a-8de8-14ce9c947a61 req-6e29b69c-8251-4ac8-81ee-479fbc2ec0d0 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.008421] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.008675] env[62204]: DEBUG 
oslo_concurrency.lockutils [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.008920] env[62204]: INFO nova.compute.manager [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Attaching volume e2243127-d9d5-46bb-a37c-1bd28401ffa5 to /dev/sdb [ 1011.010600] env[62204]: DEBUG nova.network.neutron [-] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.025166] env[62204]: DEBUG oslo_vmware.api [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200313, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158525} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.026032] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.026236] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.026624] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.026624] env[62204]: INFO nova.compute.manager [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1011.026743] env[62204]: DEBUG oslo.service.loopingcall [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.026961] env[62204]: DEBUG nova.compute.manager [-] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1011.027054] env[62204]: DEBUG nova.network.neutron [-] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1011.061683] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f3ad70-852c-43b9-ba75-2f47618d31ec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.068735] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f75793-148d-4018-a73a-cf22a292a011 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.082017] env[62204]: DEBUG nova.virt.block_device [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating existing volume attachment record: 5e9cbf2a-2f63-4874-9e28-dbbada063f09 {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1011.084396] env[62204]: DEBUG nova.network.neutron [-] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.108444] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-70b4f975-1523-4b33-ac40-731cb7744a87 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.121723] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8980d66b-fd42-4472-afb9-892b6086b9b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.149503] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4fe64711-1027-4cfe-9f7e-ae3dc575c22e tempest-AttachInterfacesTestJSON-2006825327 tempest-AttachInterfacesTestJSON-2006825327-project-member] Lock "274285e5-fc23-48b4-b0d6-5a67bc764d78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.951s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.159215] env[62204]: DEBUG nova.compute.manager [req-3a923b72-b1cd-4729-9fcc-c7303d3fc57e req-59e44633-27ff-4e23-93d7-456ea4135ec1 service nova] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Detach interface failed, port_id=ba57ac2e-ab3a-4f89-8431-6ee5eceebd8c, reason: Instance 2178b629-4be6-473b-9a75-19efa234d442 could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1011.181716] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6e6ca1-52d3-44be-8ea1-b531eefc26d5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.200076] env[62204]: INFO nova.compute.manager [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Detaching volume e7cba0e4-1e22-4040-be6e-c078b7175758 [ 1011.205717] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1bc186-8a79-466a-9513-417fa426709e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.215428] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance '57e14d47-1d3f-4fed-93c1-11cfc17dc9bc' progress to 83 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1011.251183] env[62204]: INFO nova.virt.block_device [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Attempting to driver detach volume e7cba0e4-1e22-4040-be6e-c078b7175758 from mountpoint /dev/sdb [ 1011.251439] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Volume detach. 
Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1011.251623] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260125', 'volume_id': 'e7cba0e4-1e22-4040-be6e-c078b7175758', 'name': 'volume-e7cba0e4-1e22-4040-be6e-c078b7175758', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21056adb-d81e-45bd-b354-1bcb488d2ed9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e7cba0e4-1e22-4040-be6e-c078b7175758', 'serial': 'e7cba0e4-1e22-4040-be6e-c078b7175758'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1011.252807] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5025fec6-eedd-4060-be33-4833d0185585 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.284253] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcffe389-4bf5-470a-b684-52dc2787561e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.292557] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c679499c-34cf-422a-a14b-571e20eff257 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.320262] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f298f10-c58b-46a9-8038-d724625a3dcc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.335111] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] The volume has not been displaced from its original location: [datastore2] volume-e7cba0e4-1e22-4040-be6e-c078b7175758/volume-e7cba0e4-1e22-4040-be6e-c078b7175758.vmdk. No consolidation needed. 
{{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1011.340375] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfiguring VM instance instance-0000005d to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1011.342985] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f6ca11f-f9b2-4ea9-a664-be47fc407d7b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.356480] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2929585e-e2db-4bf3-9395-78a06f531cb6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.365592] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a069fe-65e8-4ed6-ac4d-ba269092b1d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.379750] env[62204]: DEBUG oslo_vmware.api [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 1011.379750] env[62204]: value = "task-1200315" [ 1011.379750] env[62204]: _type = "Task" [ 1011.379750] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.391881] env[62204]: DEBUG oslo_vmware.api [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200315, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.400040] env[62204]: DEBUG nova.compute.manager [req-a3461069-e589-422a-8de8-14ce9c947a61 req-6e29b69c-8251-4ac8-81ee-479fbc2ec0d0 service nova] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Detach interface failed, port_id=2d9bc2f9-f421-48d9-a636-57fac9c47255, reason: Instance 1c52b662-e436-4e0c-a77b-0f2fc1041a7d could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1011.470133] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1363c3e7-48ba-46bc-9c79-1c24514b54c9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.481147] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853540ea-2921-4895-95c4-8bb32d149a13 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.516259] env[62204]: INFO nova.compute.manager [-] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Took 1.34 seconds to deallocate network for instance. 
[ 1011.519333] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95015c6-e971-479e-b54f-a5288b6f77eb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.532229] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea2c598-a066-4e5d-9576-bf5d9270c6e3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.548331] env[62204]: DEBUG nova.compute.provider_tree [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.587149] env[62204]: INFO nova.compute.manager [-] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Took 1.27 seconds to deallocate network for instance. [ 1011.721798] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.722311] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d86b8e7-0a98-4387-b868-146ce55722ec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.732652] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1011.732652] env[62204]: value = "task-1200318" [ 1011.732652] env[62204]: _type = "Task" [ 1011.732652] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.741640] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.745464] env[62204]: DEBUG nova.network.neutron [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Successfully updated port: b6783952-cb00-4f07-907c-d302559ad37d {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1011.891501] env[62204]: DEBUG oslo_vmware.api [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200315, 'name': ReconfigVM_Task, 'duration_secs': 0.274054} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.892246] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfigured VM instance instance-0000005d to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1011.897572] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6dbb7acb-a390-483a-a1c0-e5a385e03481 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.912790] env[62204]: DEBUG oslo_vmware.api [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 1011.912790] env[62204]: value = "task-1200319" [ 1011.912790] env[62204]: _type = "Task" [ 1011.912790] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.921552] env[62204]: DEBUG oslo_vmware.api [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200319, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.026554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.038310] env[62204]: DEBUG nova.network.neutron [-] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.050476] env[62204]: DEBUG nova.scheduler.client.report [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1012.095290] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1012.240485] env[62204]: DEBUG oslo_vmware.api [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 
tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200318, 'name': PowerOnVM_Task, 'duration_secs': 0.443519} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.240712] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.240935] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d756d8e7-15eb-4e42-9a79-048c69a17f58 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance '57e14d47-1d3f-4fed-93c1-11cfc17dc9bc' progress to 100 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1012.248263] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "refresh_cache-032bbedb-7663-45a3-b2d0-37570d38f573" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.248263] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "refresh_cache-032bbedb-7663-45a3-b2d0-37570d38f573" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.248263] env[62204]: DEBUG nova.network.neutron [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1012.423209] env[62204]: DEBUG oslo_vmware.api [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200319, 'name': ReconfigVM_Task, 'duration_secs': 0.161754} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.423991] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260125', 'volume_id': 'e7cba0e4-1e22-4040-be6e-c078b7175758', 'name': 'volume-e7cba0e4-1e22-4040-be6e-c078b7175758', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21056adb-d81e-45bd-b354-1bcb488d2ed9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e7cba0e4-1e22-4040-be6e-c078b7175758', 'serial': 'e7cba0e4-1e22-4040-be6e-c078b7175758'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1012.541048] env[62204]: INFO nova.compute.manager [-] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Took 1.51 seconds to deallocate network for instance. [ 1012.555780] env[62204]: DEBUG oslo_concurrency.lockutils [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.946s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1012.558653] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.612s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1012.560166] env[62204]: INFO nova.compute.claims [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1012.587982] env[62204]: INFO nova.scheduler.client.report [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleted allocations for instance 4dc4546f-85e6-4259-9ccd-a7396669eace [ 1012.640310] env[62204]: DEBUG nova.compute.manager [req-f2b8c413-88b9-4513-b341-67f283d1a62d req-f483f22a-8a8d-4957-9a85-621f1137ee8a service nova] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Received event network-vif-deleted-7394819f-3d04-4685-a087-5a61976b658a {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1012.943960] env[62204]: DEBUG nova.network.neutron [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1012.971756] env[62204]: DEBUG nova.objects.instance [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 21056adb-d81e-45bd-b354-1bcb488d2ed9 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1013.047727] env[62204]: DEBUG oslo_concurrency.lockutils [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.073882] env[62204]: DEBUG nova.compute.manager [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Received event network-vif-plugged-b6783952-cb00-4f07-907c-d302559ad37d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1013.073882] env[62204]: DEBUG oslo_concurrency.lockutils [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] Acquiring lock "032bbedb-7663-45a3-b2d0-37570d38f573-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.073882] env[62204]: DEBUG oslo_concurrency.lockutils [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] Lock "032bbedb-7663-45a3-b2d0-37570d38f573-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.073882] env[62204]: DEBUG oslo_concurrency.lockutils [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] Lock "032bbedb-7663-45a3-b2d0-37570d38f573-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.073882] env[62204]: DEBUG nova.compute.manager [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] No waiting events found dispatching network-vif-plugged-b6783952-cb00-4f07-907c-d302559ad37d {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1013.073882] env[62204]: WARNING nova.compute.manager [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Received unexpected event network-vif-plugged-b6783952-cb00-4f07-907c-d302559ad37d for instance with vm_state building and task_state spawning. 
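The 'Acquiring lock ... / Lock ... acquired ... waited / ... "released" ... held' lines above come from oslo.concurrency's named locks, which serialize resource-tracker updates and per-instance "<uuid>-events" handling. The following sketch shows only that locking pattern; the tracker and events objects are hypothetical stand-ins, and just the lockutils calls themselves are taken from the library.

# A minimal sketch of the oslo.concurrency usage behind the lock lines above:
# a named in-process lock serializing updates, plus the context-manager form
# for ad-hoc per-instance "<uuid>-events" locks. The tracker/events objects
# are hypothetical; only the lockutils calls come from the library.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(tracker, instance):
    # Runs under the "compute_resources" lock; acquiring and releasing it is
    # what emits the "Acquiring lock ... / ... waited / ... held" DEBUG lines.
    tracker.update(instance)


def pop_instance_event(events, instance_uuid, name):
    # Same idea with an explicit context manager for an instance-scoped lock.
    with lockutils.lock(f'{instance_uuid}-events'):
        return events.get(instance_uuid, {}).pop(name, None)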
[ 1013.074086] env[62204]: DEBUG nova.compute.manager [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Received event network-changed-b6783952-cb00-4f07-907c-d302559ad37d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1013.076826] env[62204]: DEBUG nova.compute.manager [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Refreshing instance network info cache due to event network-changed-b6783952-cb00-4f07-907c-d302559ad37d. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1013.078492] env[62204]: DEBUG oslo_concurrency.lockutils [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] Acquiring lock "refresh_cache-032bbedb-7663-45a3-b2d0-37570d38f573" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1013.099930] env[62204]: DEBUG oslo_concurrency.lockutils [None req-437f5cbc-8ad5-4b6c-97fc-b4239fb56d48 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "4dc4546f-85e6-4259-9ccd-a7396669eace" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.216s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1013.215506] env[62204]: DEBUG nova.network.neutron [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Updating instance_info_cache with network_info: [{"id": "b6783952-cb00-4f07-907c-d302559ad37d", "address": "fa:16:3e:8c:b3:b6", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6783952-cb", "ovs_interfaceid": "b6783952-cb00-4f07-907c-d302559ad37d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.718203] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "refresh_cache-032bbedb-7663-45a3-b2d0-37570d38f573" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1013.718540] env[62204]: DEBUG nova.compute.manager [None 
req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Instance network_info: |[{"id": "b6783952-cb00-4f07-907c-d302559ad37d", "address": "fa:16:3e:8c:b3:b6", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6783952-cb", "ovs_interfaceid": "b6783952-cb00-4f07-907c-d302559ad37d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1013.721773] env[62204]: DEBUG oslo_concurrency.lockutils [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] Acquired lock "refresh_cache-032bbedb-7663-45a3-b2d0-37570d38f573" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.721976] env[62204]: DEBUG nova.network.neutron [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Refreshing network info cache for port b6783952-cb00-4f07-907c-d302559ad37d {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1013.723100] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:b3:b6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6783952-cb00-4f07-907c-d302559ad37d', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1013.733953] env[62204]: DEBUG oslo.service.loopingcall [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1013.735668] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1013.735909] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f2f0f63-ff4f-4f5f-9874-0761da474d44 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.769126] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1013.769126] env[62204]: value = "task-1200321" [ 1013.769126] env[62204]: _type = "Task" [ 1013.769126] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.782413] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200321, 'name': CreateVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.836198] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5baa7a48-2e76-45c4-9974-14b7227bc728 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.846255] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd5975e-15d5-4917-bcc9-14fc01737fe1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.880070] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b6e344-d761-47ae-8b1f-28a691758cae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.888734] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c347adcc-76aa-4236-819e-91eeee2bd706 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.903513] env[62204]: DEBUG nova.compute.provider_tree [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.983576] env[62204]: DEBUG oslo_concurrency.lockutils [None req-03fb029d-2a03-4563-8ef5-2e930a8e4b91 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.295s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.024862] env[62204]: DEBUG oslo_concurrency.lockutils [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.025527] env[62204]: DEBUG oslo_concurrency.lockutils [None 
req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.142697] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.142994] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.143662] env[62204]: DEBUG nova.compute.manager [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Going to confirm migration 4 {{(pid=62204) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1014.283476] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200321, 'name': CreateVM_Task, 'duration_secs': 0.324051} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.283656] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1014.284413] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.284585] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.284944] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1014.285182] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c29de351-3b2a-4a19-b3a8-33cb87057907 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.290029] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1014.290029] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f97d11-a26f-4ce8-a5f7-37dc701ca328" [ 1014.290029] env[62204]: _type = "Task" [ 1014.290029] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.299662] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f97d11-a26f-4ce8-a5f7-37dc701ca328, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.409520] env[62204]: DEBUG nova.scheduler.client.report [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1014.530100] env[62204]: INFO nova.compute.manager [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Detaching volume e661c092-1fad-4e6e-8508-fde31280e224 [ 1014.568808] env[62204]: DEBUG nova.network.neutron [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Updated VIF entry in instance network info cache for port b6783952-cb00-4f07-907c-d302559ad37d. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1014.568808] env[62204]: DEBUG nova.network.neutron [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Updating instance_info_cache with network_info: [{"id": "b6783952-cb00-4f07-907c-d302559ad37d", "address": "fa:16:3e:8c:b3:b6", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6783952-cb", "ovs_interfaceid": "b6783952-cb00-4f07-907c-d302559ad37d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.573733] env[62204]: INFO nova.virt.block_device [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Attempting to driver detach volume e661c092-1fad-4e6e-8508-fde31280e224 from mountpoint /dev/sdc [ 1014.573939] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 
tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Volume detach. Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1014.574160] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260129', 'volume_id': 'e661c092-1fad-4e6e-8508-fde31280e224', 'name': 'volume-e661c092-1fad-4e6e-8508-fde31280e224', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21056adb-d81e-45bd-b354-1bcb488d2ed9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e661c092-1fad-4e6e-8508-fde31280e224', 'serial': 'e661c092-1fad-4e6e-8508-fde31280e224'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1014.575377] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f80876-0b89-42e0-9800-802414873d0e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.598887] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74fd4b3-c722-4acf-bf2a-34b6e03cab1d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.607182] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6bbd34-c6ce-488b-9a83-593bf62a8dee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.627873] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1b1411-564f-45e8-a82e-3626d8083f08 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.642622] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] The volume has not been displaced from its original location: [datastore1] volume-e661c092-1fad-4e6e-8508-fde31280e224/volume-e661c092-1fad-4e6e-8508-fde31280e224.vmdk. No consolidation needed. 
{{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1014.647913] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfiguring VM instance instance-0000005d to detach disk 2002 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1014.651245] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a09d56a-f7ad-485c-998d-c11bc6382c97 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.670846] env[62204]: DEBUG oslo_vmware.api [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 1014.670846] env[62204]: value = "task-1200322" [ 1014.670846] env[62204]: _type = "Task" [ 1014.670846] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.680651] env[62204]: DEBUG oslo_vmware.api [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200322, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.703076] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.703439] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.703532] env[62204]: DEBUG nova.network.neutron [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1014.703638] env[62204]: DEBUG nova.objects.instance [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'info_cache' on Instance uuid 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1014.800820] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f97d11-a26f-4ce8-a5f7-37dc701ca328, 'name': SearchDatastore_Task, 'duration_secs': 0.011515} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.801248] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.801502] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1014.802228] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1014.802228] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.802228] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1014.802399] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b836fcd-453e-4859-9eb7-34832d1a0f8f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.811066] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1014.811316] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1014.812152] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-023795f7-0c1c-457a-845b-b94f94330060 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.817636] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1014.817636] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dae2e4-bdc8-bf89-1b48-d60878230896" [ 1014.817636] env[62204]: _type = "Task" [ 1014.817636] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.825074] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dae2e4-bdc8-bf89-1b48-d60878230896, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.914822] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1014.915398] env[62204]: DEBUG nova.compute.manager [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1014.918121] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.623s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1014.918302] env[62204]: DEBUG nova.objects.instance [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lazy-loading 'resources' on Instance uuid c0990e53-70c9-4536-b26a-bc00bd457c56 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.070958] env[62204]: DEBUG oslo_concurrency.lockutils [req-6a238a4b-7cd8-4cbd-baf2-b453e5ee0d05 req-3e4e0bb0-bfa5-40fb-9563-7b4b968ae1fe service nova] Releasing lock "refresh_cache-032bbedb-7663-45a3-b2d0-37570d38f573" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.181156] env[62204]: DEBUG oslo_vmware.api [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200322, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.328590] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dae2e4-bdc8-bf89-1b48-d60878230896, 'name': SearchDatastore_Task, 'duration_secs': 0.009554} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.329243] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a189b233-48fa-4e86-8245-1297f56add8a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.334800] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1015.334800] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bedd68-08a6-6968-c0a2-530b152751eb" [ 1015.334800] env[62204]: _type = "Task" [ 1015.334800] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.341759] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bedd68-08a6-6968-c0a2-530b152751eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.422070] env[62204]: DEBUG nova.compute.utils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1015.426821] env[62204]: DEBUG nova.compute.manager [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1015.426821] env[62204]: DEBUG nova.network.neutron [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1015.471790] env[62204]: DEBUG nova.policy [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0a2edea246e74173bbdb4365d0309cd7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be5f3f8b28ab4b63a2621b1fe1383af0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1015.605315] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.605552] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.635683] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c97055-c952-48b8-9d1c-bd003d6e2db1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.643119] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f95712-2b76-4075-9740-0af92562a98f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.647578] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Volume attach. 
Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1015.647795] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260131', 'volume_id': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'name': 'volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a93880fc-e517-4d83-98c1-9ce2405bf9d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'serial': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1015.648657] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226b6ae9-e294-4b75-b726-3564e826b7a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.688558] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872feab5-edd9-40ba-9e8f-aa5153a1f052 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.695078] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2313738-4996-412d-846d-1511f7a5d8b0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.724385] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5/volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.727232] env[62204]: DEBUG nova.network.neutron [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Successfully created port: 7cd482c2-c3eb-4a81-934b-4c959a24a664 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1015.731908] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-775a106b-8caa-4dfd-8f2c-ecf637f4398b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.744781] env[62204]: DEBUG oslo_vmware.api [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200322, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.746796] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b50004f-acaf-4418-9b9d-7366f8536001 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.762182] env[62204]: DEBUG nova.compute.provider_tree [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.764172] env[62204]: DEBUG oslo_vmware.api [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1015.764172] env[62204]: value = "task-1200323" [ 1015.764172] env[62204]: _type = "Task" [ 1015.764172] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.772655] env[62204]: DEBUG oslo_vmware.api [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.845632] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bedd68-08a6-6968-c0a2-530b152751eb, 'name': SearchDatastore_Task, 'duration_secs': 0.045457} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.846160] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1015.846246] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 032bbedb-7663-45a3-b2d0-37570d38f573/032bbedb-7663-45a3-b2d0-37570d38f573.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1015.846464] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16952c60-b5cf-40cb-8ff4-35b10f1d9612 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.852613] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1015.852613] env[62204]: value = "task-1200324" [ 1015.852613] env[62204]: _type = "Task" [ 1015.852613] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.861297] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200324, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.927473] env[62204]: DEBUG nova.compute.manager [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1016.108583] env[62204]: DEBUG nova.compute.manager [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1016.186432] env[62204]: DEBUG nova.network.neutron [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [{"id": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "address": "fa:16:3e:d5:25:7f", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e81e820-35", "ovs_interfaceid": "4e81e820-357d-4b7e-900f-aaac4c7c2798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.203978] env[62204]: DEBUG oslo_vmware.api [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200322, 'name': ReconfigVM_Task, 'duration_secs': 1.250668} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.204490] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Reconfigured VM instance instance-0000005d to detach disk 2002 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1016.212343] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9d7aef1-3b48-4e59-abb5-92588bb5e59f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.236531] env[62204]: DEBUG oslo_vmware.api [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 1016.236531] env[62204]: value = "task-1200325" [ 1016.236531] env[62204]: _type = "Task" [ 1016.236531] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.244834] env[62204]: DEBUG oslo_vmware.api [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200325, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.265717] env[62204]: DEBUG nova.scheduler.client.report [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1016.277994] env[62204]: DEBUG oslo_vmware.api [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200323, 'name': ReconfigVM_Task, 'duration_secs': 0.497408} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.278162] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfigured VM instance instance-00000060 to attach disk [datastore1] volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5/volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.283770] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d671a2e-0034-4621-b858-0aadc5aa1eec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.301364] env[62204]: DEBUG oslo_vmware.api [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1016.301364] env[62204]: value = "task-1200326" [ 1016.301364] env[62204]: _type = "Task" [ 1016.301364] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.312660] env[62204]: DEBUG oslo_vmware.api [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200326, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.363616] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200324, 'name': CopyVirtualDisk_Task} progress is 89%. 
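The scheduler report entries above compare freshly computed inventory for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 against what was last reported and skip the placement update when nothing changed. Here is a minimal sketch of that decision, using the inventory dict exactly as logged; inventory_changed is an illustrative helper, not the report client's code.

```python
# Inventory as logged for provider 92e8f362-5134-40c6-9a5c-0b8f64197972.
reported = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 156,
                "step_size": 1, "allocation_ratio": 1.0},
}


def inventory_changed(current, previous):
    """True when any resource class or field differs and an update must be sent."""
    return current != previous


fresh = {rc: dict(fields) for rc, fields in reported.items()}
print(inventory_changed(fresh, reported))   # False -> "Inventory has not changed"
fresh["DISK_GB"]["reserved"] = 10
print(inventory_changed(fresh, reported))   # True  -> send updated inventory
```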
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.634997] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.691437] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1016.691437] env[62204]: DEBUG nova.objects.instance [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'migration_context' on Instance uuid 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1016.745457] env[62204]: DEBUG oslo_vmware.api [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200325, 'name': ReconfigVM_Task, 'duration_secs': 0.155811} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.745813] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260129', 'volume_id': 'e661c092-1fad-4e6e-8508-fde31280e224', 'name': 'volume-e661c092-1fad-4e6e-8508-fde31280e224', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '21056adb-d81e-45bd-b354-1bcb488d2ed9', 'attached_at': '', 'detached_at': '', 'volume_id': 'e661c092-1fad-4e6e-8508-fde31280e224', 'serial': 'e661c092-1fad-4e6e-8508-fde31280e224'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1016.774206] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.856s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.776463] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.750s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.776744] env[62204]: DEBUG nova.objects.instance [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] 
Lazy-loading 'resources' on Instance uuid 1c52b662-e436-4e0c-a77b-0f2fc1041a7d {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1016.791402] env[62204]: INFO nova.scheduler.client.report [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Deleted allocations for instance c0990e53-70c9-4536-b26a-bc00bd457c56 [ 1016.811045] env[62204]: DEBUG oslo_vmware.api [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200326, 'name': ReconfigVM_Task, 'duration_secs': 0.157521} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.811420] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260131', 'volume_id': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'name': 'volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a93880fc-e517-4d83-98c1-9ce2405bf9d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'serial': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1016.862767] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200324, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528196} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.863115] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 032bbedb-7663-45a3-b2d0-37570d38f573/032bbedb-7663-45a3-b2d0-37570d38f573.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1016.863314] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1016.863577] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d9b566b-cdb9-4b3c-8319-39a598acfdfa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.869428] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1016.869428] env[62204]: value = "task-1200327" [ 1016.869428] env[62204]: _type = "Task" [ 1016.869428] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.876179] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200327, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.937275] env[62204]: DEBUG nova.compute.manager [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1016.962260] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1016.962528] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1016.962692] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.962876] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1016.963041] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.963209] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1016.963417] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1016.963585] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1016.963752] env[62204]: DEBUG 
nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1016.963918] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1016.964116] env[62204]: DEBUG nova.virt.hardware [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1016.964975] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2d3157-915c-450c-ae7b-9608cfa38656 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.972440] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72577d0-71ee-49da-8a3d-37f04606b9bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.138228] env[62204]: DEBUG nova.compute.manager [req-ff4778b6-50f7-4df5-8362-962967531bed req-28a8657a-991a-4672-900c-37ce3a744d88 service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Received event network-vif-plugged-7cd482c2-c3eb-4a81-934b-4c959a24a664 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1017.138471] env[62204]: DEBUG oslo_concurrency.lockutils [req-ff4778b6-50f7-4df5-8362-962967531bed req-28a8657a-991a-4672-900c-37ce3a744d88 service nova] Acquiring lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.138683] env[62204]: DEBUG oslo_concurrency.lockutils [req-ff4778b6-50f7-4df5-8362-962967531bed req-28a8657a-991a-4672-900c-37ce3a744d88 service nova] Lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.138855] env[62204]: DEBUG oslo_concurrency.lockutils [req-ff4778b6-50f7-4df5-8362-962967531bed req-28a8657a-991a-4672-900c-37ce3a744d88 service nova] Lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.139039] env[62204]: DEBUG nova.compute.manager [req-ff4778b6-50f7-4df5-8362-962967531bed req-28a8657a-991a-4672-900c-37ce3a744d88 service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] No waiting events found dispatching network-vif-plugged-7cd482c2-c3eb-4a81-934b-4c959a24a664 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1017.139211] env[62204]: WARNING nova.compute.manager 
[req-ff4778b6-50f7-4df5-8362-962967531bed req-28a8657a-991a-4672-900c-37ce3a744d88 service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Received unexpected event network-vif-plugged-7cd482c2-c3eb-4a81-934b-4c959a24a664 for instance with vm_state building and task_state spawning. [ 1017.195961] env[62204]: DEBUG nova.objects.base [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Object Instance<57e14d47-1d3f-4fed-93c1-11cfc17dc9bc> lazy-loaded attributes: info_cache,migration_context {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1017.195961] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08ea9e4-1126-4e4b-b5ac-fb558f3adef0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.217085] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db1a3d35-2b98-4a0a-b80a-2300032d9c96 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.224390] env[62204]: DEBUG oslo_vmware.api [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1017.224390] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526a6eab-32b2-f5b0-a7c7-dc1789804dee" [ 1017.224390] env[62204]: _type = "Task" [ 1017.224390] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.230430] env[62204]: DEBUG oslo_vmware.api [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526a6eab-32b2-f5b0-a7c7-dc1789804dee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.286316] env[62204]: DEBUG nova.objects.instance [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'flavor' on Instance uuid 21056adb-d81e-45bd-b354-1bcb488d2ed9 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.301482] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cc69c47c-b903-4616-aafd-232263c5a083 tempest-ServersNegativeTestJSON-585823771 tempest-ServersNegativeTestJSON-585823771-project-member] Lock "c0990e53-70c9-4536-b26a-bc00bd457c56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.993s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.380919] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200327, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067106} completed successfully. 
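The nova.virt.hardware entries above enumerate every (sockets, cores, threads) combination that multiplies out to the requested vCPU count within the flavor and image limits, then sort the candidates by preference. A toy version of the enumeration step is sketched below; it is not Nova's implementation and it skips the preference sort.

```python
def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    """All (sockets, cores, threads) triples whose product equals vcpus
    and where each element stays within its limit."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies


# m1.nano from the log: 1 vCPU with 65536:65536:65536 limits -> [(1, 1, 1)].
print(possible_cpu_topologies(1, 65536, 65536, 65536))
# A 4-vCPU flavor shows the combinatorics: six candidates,
# e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1).
print(possible_cpu_topologies(4, 65536, 65536, 65536))
```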
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.381159] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1017.383792] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b272684-00ba-40b7-8f8c-3853e0d08c46 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.407516] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 032bbedb-7663-45a3-b2d0-37570d38f573/032bbedb-7663-45a3-b2d0-37570d38f573.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.410143] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9ae1076-2fb8-489c-92cd-9d248c9e4387 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.429144] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1017.429144] env[62204]: value = "task-1200328" [ 1017.429144] env[62204]: _type = "Task" [ 1017.429144] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.437010] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200328, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.509599] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16371c30-6f4b-45b3-a979-8f62cc60542e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.517518] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ad52d6-d9d1-4cbe-a99d-540befc24ac0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.547904] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ce5b56-3e05-4263-b3f8-a744c8b238ca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.555084] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91bde14-03c9-423f-b308-4b28c9ce56e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.568197] env[62204]: DEBUG nova.compute.provider_tree [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1017.734376] env[62204]: DEBUG oslo_vmware.api [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526a6eab-32b2-f5b0-a7c7-dc1789804dee, 'name': SearchDatastore_Task, 'duration_secs': 0.017434} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.735338] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.742278] env[62204]: DEBUG nova.network.neutron [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Successfully updated port: 7cd482c2-c3eb-4a81-934b-4c959a24a664 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1017.769611] env[62204]: DEBUG nova.compute.manager [req-2c6afe3e-3a20-4522-b9cf-c6b32e38bb3d req-a26a34ed-85a7-4b25-b929-0a078fee197b service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Received event network-changed-7cd482c2-c3eb-4a81-934b-4c959a24a664 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1017.769611] env[62204]: DEBUG nova.compute.manager [req-2c6afe3e-3a20-4522-b9cf-c6b32e38bb3d req-a26a34ed-85a7-4b25-b929-0a078fee197b service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Refreshing instance network info cache due to event network-changed-7cd482c2-c3eb-4a81-934b-4c959a24a664. 
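The compute manager entries above show how external Neutron events (network-vif-plugged, network-changed) are dispatched: the handler takes the per-instance events lock, pops a registered waiter if the spawning thread asked for that event, and otherwise reports the event as unexpected. The toy registry below illustrates that pop-or-warn behaviour; the class and method names are illustrative, not Nova's.

```python
import threading


class InstanceEvents:
    """Toy per-instance event registry: spawn registers the events it expects,
    the external-event handler pops the matching waiter or reports it missing."""

    def __init__(self):
        self._lock = threading.Lock()   # stands in for the "<uuid>-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare_for(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)


events = InstanceEvents()
uuid = "d3be85d1-34b6-4b00-9740-c3abdb4b0734"

# An event nothing is waiting for is reported, matching the WARNING above.
if events.pop_event(uuid, "network-vif-plugged") is None:
    print(f"No waiting events found dispatching network-vif-plugged for {uuid}")

# When spawn registered interest first, the handler wakes the waiter instead.
waiter = events.prepare_for(uuid, "network-vif-plugged")
pending = events.pop_event(uuid, "network-vif-plugged")
if pending is not None:
    pending.set()
print("waiter signalled:", waiter.is_set())
```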
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1017.769611] env[62204]: DEBUG oslo_concurrency.lockutils [req-2c6afe3e-3a20-4522-b9cf-c6b32e38bb3d req-a26a34ed-85a7-4b25-b929-0a078fee197b service nova] Acquiring lock "refresh_cache-d3be85d1-34b6-4b00-9740-c3abdb4b0734" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1017.769611] env[62204]: DEBUG oslo_concurrency.lockutils [req-2c6afe3e-3a20-4522-b9cf-c6b32e38bb3d req-a26a34ed-85a7-4b25-b929-0a078fee197b service nova] Acquired lock "refresh_cache-d3be85d1-34b6-4b00-9740-c3abdb4b0734" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.769611] env[62204]: DEBUG nova.network.neutron [req-2c6afe3e-3a20-4522-b9cf-c6b32e38bb3d req-a26a34ed-85a7-4b25-b929-0a078fee197b service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Refreshing network info cache for port 7cd482c2-c3eb-4a81-934b-4c959a24a664 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1017.846393] env[62204]: DEBUG nova.objects.instance [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'flavor' on Instance uuid a93880fc-e517-4d83-98c1-9ce2405bf9d5 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.941890] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200328, 'name': ReconfigVM_Task, 'duration_secs': 0.284712} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.941890] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 032bbedb-7663-45a3-b2d0-37570d38f573/032bbedb-7663-45a3-b2d0-37570d38f573.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1017.941890] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2461102-f630-4143-96db-7f25bfbf05f0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.949019] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1017.949019] env[62204]: value = "task-1200329" [ 1017.949019] env[62204]: _type = "Task" [ 1017.949019] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.955101] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200329, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.073022] env[62204]: DEBUG nova.scheduler.client.report [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1018.249769] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "refresh_cache-d3be85d1-34b6-4b00-9740-c3abdb4b0734" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.293754] env[62204]: DEBUG oslo_concurrency.lockutils [None req-38c0e17f-a9fb-4451-864a-db2d2462115a tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.267s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.307059] env[62204]: DEBUG nova.network.neutron [req-2c6afe3e-3a20-4522-b9cf-c6b32e38bb3d req-a26a34ed-85a7-4b25-b929-0a078fee197b service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1018.352222] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4e7eb8e0-5e89-4e79-9605-0c8070245aab tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.343s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.458424] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200329, 'name': Rename_Task, 'duration_secs': 0.153615} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.458424] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1018.458424] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05932c03-a814-4346-b45e-c5dbcb11d4ae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.467140] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1018.467140] env[62204]: value = "task-1200330" [ 1018.467140] env[62204]: _type = "Task" [ 1018.467140] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.467140] env[62204]: DEBUG nova.network.neutron [req-2c6afe3e-3a20-4522-b9cf-c6b32e38bb3d req-a26a34ed-85a7-4b25-b929-0a078fee197b service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.476643] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.577861] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.800s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.582484] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.487s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.582484] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.584251] env[62204]: DEBUG oslo_concurrency.lockutils [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.537s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.588635] env[62204]: DEBUG nova.objects.instance [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lazy-loading 'resources' on Instance uuid d97d792d-614f-42e3-8516-6c0a7cf15ad5 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.606534] env[62204]: INFO nova.scheduler.client.report [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted allocations for instance 1c52b662-e436-4e0c-a77b-0f2fc1041a7d [ 1018.622794] env[62204]: INFO nova.scheduler.client.report [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted allocations for instance 2178b629-4be6-473b-9a75-19efa234d442 [ 1018.973492] env[62204]: DEBUG oslo_concurrency.lockutils [req-2c6afe3e-3a20-4522-b9cf-c6b32e38bb3d req-a26a34ed-85a7-4b25-b929-0a078fee197b service nova] Releasing lock "refresh_cache-d3be85d1-34b6-4b00-9740-c3abdb4b0734" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1018.974276] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "refresh_cache-d3be85d1-34b6-4b00-9740-c3abdb4b0734" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.974276] env[62204]: DEBUG nova.network.neutron [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1018.981832] env[62204]: DEBUG oslo_vmware.api [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200330, 'name': PowerOnVM_Task, 'duration_secs': 0.440408} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.982573] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1018.982573] env[62204]: INFO nova.compute.manager [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Took 8.19 seconds to spawn the instance on the hypervisor. 
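Read together, the entries for instance 032bbedb-7663-45a3-b2d0-37570d38f573 trace the vmwareapi spawn path: copy the cached image VMDK, extend the root disk, reconfigure the VM to attach it, rename the VM, and power it on. The sketch below compresses that ordering into one function; the session object and its run_task helper are invented stand-ins, with each call representing one of the vCenter tasks logged above.

```python
class StubVMwareSession:
    """Hypothetical stand-in: each call represents one vCenter task from the log."""

    def run_task(self, name, **kwargs):
        print(f"{name} submitted with {kwargs} ... completed successfully")


def spawn_from_cached_image(session, instance_uuid, image_vmdk, root_kb):
    """Ordering of the spawn steps logged for instance 032bbedb-...: copy the
    cached image disk, extend it, attach it, rename the VM, power it on."""
    instance_vmdk = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"
    session.run_task("CopyVirtualDisk_Task", src=image_vmdk, dst=instance_vmdk)
    session.run_task("ExtendVirtualDisk_Task", disk=instance_vmdk, size=root_kb)
    session.run_task("ReconfigVM_Task", vm=instance_uuid, attach=instance_vmdk)
    session.run_task("Rename_Task", vm=instance_uuid)
    session.run_task("PowerOnVM_Task", vm=instance_uuid)


spawn_from_cached_image(
    StubVMwareSession(),
    "032bbedb-7663-45a3-b2d0-37570d38f573",
    "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/"
    "c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk",
    root_kb=1048576,  # the size value logged when the root disk was extended
)
```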
[ 1018.982705] env[62204]: DEBUG nova.compute.manager [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1018.983469] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6f3d71-18de-41b6-a64f-bce83e4b7ccb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.118038] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6209fee3-6d12-464b-8088-acc8e86c13b4 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "1c52b662-e436-4e0c-a77b-0f2fc1041a7d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.551s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.138396] env[62204]: DEBUG oslo_concurrency.lockutils [None req-55954c9a-a842-4599-a3b8-aaffd4ceb111 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "2178b629-4be6-473b-9a75-19efa234d442" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.783s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.278362] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00a45ee-8f8a-4bbd-9641-a9ad0a653d61 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.285827] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99d4357-1319-4c96-a8f1-19bc5fd20750 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.317925] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44840ec-f77d-49c1-9e7a-20d6add338e2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.326606] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d16464a-6ca7-425e-ab5a-72a15eb06add {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.341360] env[62204]: DEBUG nova.compute.provider_tree [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.503967] env[62204]: INFO nova.compute.manager [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Took 15.43 seconds to build instance. 
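Nearly every lockutils entry in this trace records how long a caller waited for a named lock and how long it held it (for example 'acquired ... waited 4.750s' and '"released" ... held 9.551s'). The small context manager below reproduces that bookkeeping in plain Python for illustration; Nova gets this behaviour from oslo_concurrency.lockutils, not from a helper like this.

```python
import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock object per lock name


@contextmanager
def timed_lock(name, caller):
    """Acquire the named lock and log waited/held durations like the entries above."""
    lock = _locks[name]
    start = time.monotonic()
    with lock:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_start
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.05)   # work done while the lock is held
```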
[ 1019.512113] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.512383] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.512586] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "21056adb-d81e-45bd-b354-1bcb488d2ed9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.512774] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.512944] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.515702] env[62204]: INFO nova.compute.manager [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Terminating instance [ 1019.517485] env[62204]: DEBUG nova.compute.manager [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1019.517927] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1019.518744] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a0851b-baa1-42b0-a8ba-e433c1f4c2fa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.526850] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.527108] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-667b1720-cc41-4de1-aac1-d73274e39dca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.529601] env[62204]: DEBUG nova.network.neutron [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1019.533307] env[62204]: DEBUG oslo_vmware.api [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 1019.533307] env[62204]: value = "task-1200331" [ 1019.533307] env[62204]: _type = "Task" [ 1019.533307] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.540938] env[62204]: DEBUG oslo_vmware.api [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200331, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.629618] env[62204]: DEBUG nova.compute.manager [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Stashing vm_state: active {{(pid=62204) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1019.788784] env[62204]: DEBUG nova.network.neutron [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Updating instance_info_cache with network_info: [{"id": "7cd482c2-c3eb-4a81-934b-4c959a24a664", "address": "fa:16:3e:83:09:b8", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cd482c2-c3", "ovs_interfaceid": "7cd482c2-c3eb-4a81-934b-4c959a24a664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1019.845153] env[62204]: DEBUG nova.scheduler.client.report [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1020.005712] env[62204]: DEBUG oslo_concurrency.lockutils [None req-42ea3f66-b568-445c-b303-3a4d09970941 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "032bbedb-7663-45a3-b2d0-37570d38f573" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.942s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.043760] env[62204]: DEBUG oslo_vmware.api [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200331, 'name': PowerOffVM_Task, 'duration_secs': 0.453353} completed 
successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.043853] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.044476] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.044797] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5141a1c6-cc21-46f3-8299-407a5c526a60 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.122218] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.122476] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.122660] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Deleting the datastore file [datastore2] 21056adb-d81e-45bd-b354-1bcb488d2ed9 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.123050] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f03f4742-b55f-42c0-adc0-e64076d01c88 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.129556] env[62204]: DEBUG oslo_vmware.api [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for the task: (returnval){ [ 1020.129556] env[62204]: value = "task-1200333" [ 1020.129556] env[62204]: _type = "Task" [ 1020.129556] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.139493] env[62204]: DEBUG oslo_vmware.api [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200333, 'name': DeleteDatastoreFile_Task} progress is 0%. 
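The terminate entries above for instance 21056adb-d81e-45bd-b354-1bcb488d2ed9 follow roughly the reverse of the spawn path: power the VM off, unregister it, delete its datastore directory, then deallocate its networking. A compressed sketch of that ordering; the session stub and its step method are invented, with each call standing for one logged operation.

```python
class StubSession:
    """Hypothetical stand-in: each call represents one operation from the log."""

    def step(self, name, **kwargs):
        print(f"{name} {kwargs} ... completed successfully")


def destroy_instance(session, instance_uuid, datastore):
    """Teardown order traced above: power off, unregister, delete the VM's
    datastore files, then hand networking back to Neutron."""
    session.step("PowerOffVM_Task", vm=instance_uuid)
    session.step("UnregisterVM", vm=instance_uuid)
    session.step("DeleteDatastoreFile_Task", path=f"[{datastore}] {instance_uuid}")
    session.step("deallocate_for_instance", instance=instance_uuid)


destroy_instance(StubSession(),
                 "21056adb-d81e-45bd-b354-1bcb488d2ed9", "datastore2")
```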
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.148896] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.292286] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "refresh_cache-d3be85d1-34b6-4b00-9740-c3abdb4b0734" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.292641] env[62204]: DEBUG nova.compute.manager [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Instance network_info: |[{"id": "7cd482c2-c3eb-4a81-934b-4c959a24a664", "address": "fa:16:3e:83:09:b8", "network": {"id": "7ecce742-0067-4d63-b426-6c4a343e8048", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-156397545-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "be5f3f8b28ab4b63a2621b1fe1383af0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15538852-1a3f-4f71-b4a9-4923c5837c4f", "external-id": "nsx-vlan-transportzone-165", "segmentation_id": 165, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cd482c2-c3", "ovs_interfaceid": "7cd482c2-c3eb-4a81-934b-4c959a24a664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1020.293121] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:09:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15538852-1a3f-4f71-b4a9-4923c5837c4f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cd482c2-c3eb-4a81-934b-4c959a24a664', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1020.302353] env[62204]: DEBUG oslo.service.loopingcall [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.302624] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1020.302888] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-adc963cb-780f-43ac-8449-06ac4f776468 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.324064] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1020.324064] env[62204]: value = "task-1200334" [ 1020.324064] env[62204]: _type = "Task" [ 1020.324064] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.331845] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200334, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.350863] env[62204]: DEBUG oslo_concurrency.lockutils [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.766s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.354048] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.719s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.356098] env[62204]: INFO nova.compute.claims [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.372846] env[62204]: INFO nova.scheduler.client.report [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Deleted allocations for instance d97d792d-614f-42e3-8516-6c0a7cf15ad5 [ 1020.639539] env[62204]: DEBUG oslo_vmware.api [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Task: {'id': task-1200333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134457} completed successfully. 
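Editor's note: the lockutils lines above ("acquired ... waited 3.719s", "released ... held 1.766s") are how contention on the single "compute_resources" lock shows up when several claims and usage updates queue behind each other. A rough imitation of that wait/held bookkeeping with a plain threading lock, assuming nothing about the oslo implementation:

    import contextlib
    import threading
    import time

    _lock = threading.Lock()

    @contextlib.contextmanager
    def timed_lock(lock, name, owner):
        """Log how long we waited to acquire the lock and how long we held it,
        in the same shape as the lockutils messages above (sketch, not oslo code)."""
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print('Lock "%s" released by "%s" :: held %.3fs' % (name, owner, held))

    with timed_lock(_lock, 'compute_resources', 'ResourceTracker.instance_claim'):
        time.sleep(0.1)  # stand-in for the claim bookkeeping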
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.639850] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1020.640084] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1020.640281] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1020.640482] env[62204]: INFO nova.compute.manager [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1020.641272] env[62204]: DEBUG oslo.service.loopingcall [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1020.642728] env[62204]: DEBUG nova.compute.manager [-] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1020.642949] env[62204]: DEBUG nova.network.neutron [-] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1020.648076] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "8081d981-42c4-46e4-82e7-2f8b59a68465" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.648305] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "8081d981-42c4-46e4-82e7-2f8b59a68465" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.834585] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200334, 'name': CreateVM_Task, 'duration_secs': 0.352489} completed successfully. 
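Editor's note: after the hypervisor destroy above, the manager hands network teardown to a looping call that waits for a retry wrapper (_deallocate_network_with_retries) to finish. The wrapper itself is Nova code; the sketch below only shows the general shape of calling a function and retrying on failure with a fixed interval:

    import time

    def call_with_retries(fn, attempts=3, interval=2.0, retry_on=(Exception,)):
        """Call fn; on failure sleep and try again, up to a bounded number of
        attempts (illustrative shape only, assumes attempts >= 1)."""
        last_exc = None
        for attempt in range(1, attempts + 1):
            try:
                return fn()
            except retry_on as exc:
                last_exc = exc
                print('attempt %d/%d failed: %s' % (attempt, attempts, exc))
                if attempt < attempts:
                    time.sleep(interval)
        raise last_exc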
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.834752] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1020.835476] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1020.835655] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.835986] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1020.836256] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fabab225-366c-43ab-b79c-20e5d063becd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.840863] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1020.840863] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52000069-1b99-5c5b-9516-e7c071951cbf" [ 1020.840863] env[62204]: _type = "Task" [ 1020.840863] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.849565] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52000069-1b99-5c5b-9516-e7c071951cbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.879477] env[62204]: DEBUG oslo_concurrency.lockutils [None req-51a2074a-17c3-4235-b9a0-686cb2a3d302 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "d97d792d-614f-42e3-8516-6c0a7cf15ad5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.978s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.915268] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "032bbedb-7663-45a3-b2d0-37570d38f573" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.915554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "032bbedb-7663-45a3-b2d0-37570d38f573" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.915763] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "032bbedb-7663-45a3-b2d0-37570d38f573-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1020.915959] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "032bbedb-7663-45a3-b2d0-37570d38f573-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.916152] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "032bbedb-7663-45a3-b2d0-37570d38f573-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1020.919659] env[62204]: INFO nova.compute.manager [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Terminating instance [ 1020.922964] env[62204]: DEBUG nova.compute.manager [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1020.923182] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.924205] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00c641c-1f58-4722-9e10-79ec8e447f80 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.934817] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1020.935136] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ce3694e-ad92-4703-a535-2dad24e1d2b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.942925] env[62204]: DEBUG oslo_vmware.api [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1020.942925] env[62204]: value = "task-1200335" [ 1020.942925] env[62204]: _type = "Task" [ 1020.942925] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.953192] env[62204]: DEBUG oslo_vmware.api [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200335, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.151173] env[62204]: DEBUG nova.compute.manager [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1021.245894] env[62204]: DEBUG nova.compute.manager [req-1c2b0461-d219-48eb-8c19-f5fad8f2bd03 req-c52395a1-627b-4901-a0b2-643be71997aa service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Received event network-vif-deleted-3874d0d3-36f6-4cab-a204-a05bf0fb54ac {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1021.246172] env[62204]: INFO nova.compute.manager [req-1c2b0461-d219-48eb-8c19-f5fad8f2bd03 req-c52395a1-627b-4901-a0b2-643be71997aa service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Neutron deleted interface 3874d0d3-36f6-4cab-a204-a05bf0fb54ac; detaching it from the instance and deleting it from the info cache [ 1021.246377] env[62204]: DEBUG nova.network.neutron [req-1c2b0461-d219-48eb-8c19-f5fad8f2bd03 req-c52395a1-627b-4901-a0b2-643be71997aa service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.352473] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52000069-1b99-5c5b-9516-e7c071951cbf, 'name': SearchDatastore_Task, 'duration_secs': 0.02002} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.352805] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1021.353050] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1021.353293] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1021.353543] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.353622] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1021.353880] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aeb503f5-4963-4c05-8cda-ef0d1c348e38 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.361971] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1021.362026] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1021.362716] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccda987b-1fa8-4319-bb91-12c3c958dd39 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.371203] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1021.371203] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a3054b-b2b7-b301-6503-ef936e24804d" [ 1021.371203] env[62204]: _type = "Task" [ 1021.371203] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.380015] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a3054b-b2b7-b301-6503-ef936e24804d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.451445] env[62204]: DEBUG oslo_vmware.api [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200335, 'name': PowerOffVM_Task, 'duration_secs': 0.211486} completed successfully. 
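Editor's note: the sequence above is the image-cache check for c0e4d3a1-f965-49e2-ab05-fbf425872dcc: take the cache lock, search the datastore for the cached .vmdk, create the devstack-image-cache_base folder if it is missing, and only then decide whether the image must be fetched or can be copied straight to the instance directory. A rough, filesystem-based stand-in for that flow (fetch_image is a hypothetical callable; the real driver browses the datastore instead of the local disk):

    import os

    def ensure_cached_image(cache_dir, image_id, fetch_image):
        """Return the path of the cached vmdk, fetching it only if absent."""
        vmdk = os.path.join(cache_dir, image_id, image_id + '.vmdk')
        # MakeDirectory in the log is likewise safe to repeat.
        os.makedirs(os.path.dirname(vmdk), exist_ok=True)
        if not os.path.exists(vmdk):          # SearchDatastore_Task equivalent
            fetch_image(vmdk)
        return vmdk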
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.453825] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.454022] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1021.454920] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52d4c7f4-c545-4bc7-a75c-7e0b135b46a8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.517883] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.518131] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.518371] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleting the datastore file [datastore1] 032bbedb-7663-45a3-b2d0-37570d38f573 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.518582] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cecbd3e7-f1fe-45f9-818f-554e1ac98cc0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.527942] env[62204]: DEBUG oslo_vmware.api [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1021.527942] env[62204]: value = "task-1200338" [ 1021.527942] env[62204]: _type = "Task" [ 1021.527942] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.535949] env[62204]: DEBUG oslo_vmware.api [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200338, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.559490] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f973318f-76cf-452a-8a12-0dec257a5da2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.566149] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67075884-c1fa-454b-ba5d-a8948a145828 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.595191] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3c0a1d-9f67-41fc-b106-471bdf595a9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.602357] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b28240-cdd6-4ce9-8414-3c2d9209099d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.614969] env[62204]: DEBUG nova.compute.provider_tree [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.673798] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.725877] env[62204]: DEBUG nova.network.neutron [-] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.748813] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f08d372-4113-4b98-95ed-5edc8a58ac6f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.758154] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d8e79e-b7b6-4c3b-a83c-d329ab8c458a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.787556] env[62204]: DEBUG nova.compute.manager [req-1c2b0461-d219-48eb-8c19-f5fad8f2bd03 req-c52395a1-627b-4901-a0b2-643be71997aa service nova] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Detach interface failed, port_id=3874d0d3-36f6-4cab-a204-a05bf0fb54ac, reason: Instance 21056adb-d81e-45bd-b354-1bcb488d2ed9 could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1021.881289] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a3054b-b2b7-b301-6503-ef936e24804d, 'name': SearchDatastore_Task, 'duration_secs': 0.008448} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.882179] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13236ba6-6647-4135-953e-01aeba8d7709 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.888046] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1021.888046] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522f5bae-9ab6-81d6-8f9b-f23c0d5eb65b" [ 1021.888046] env[62204]: _type = "Task" [ 1021.888046] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.895148] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522f5bae-9ab6-81d6-8f9b-f23c0d5eb65b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.038245] env[62204]: DEBUG oslo_vmware.api [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15207} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.038631] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.038890] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1022.039170] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1022.039422] env[62204]: INFO nova.compute.manager [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Took 1.12 seconds to destroy the instance on the hypervisor. 
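Editor's note: the "Detach interface failed ... could not be found" line above is benign: Neutron reports the port deleted while the instance itself is already mid-teardown, so the failed detach is logged and swallowed rather than treated as an error. A small sketch of that tolerant handling, with detach_interface and InstanceNotFound as hypothetical stand-ins for the driver call and its exception:

    class InstanceNotFound(Exception):
        pass

    def handle_vif_deleted(detach_interface, instance_uuid, port_id):
        """Detach the VIF if the instance still exists; otherwise just log."""
        try:
            detach_interface(instance_uuid, port_id)
        except InstanceNotFound:
            print('Detach interface failed, port_id=%s, reason: Instance %s '
                  'could not be found.' % (port_id, instance_uuid))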
[ 1022.039761] env[62204]: DEBUG oslo.service.loopingcall [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1022.040046] env[62204]: DEBUG nova.compute.manager [-] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1022.040179] env[62204]: DEBUG nova.network.neutron [-] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1022.117901] env[62204]: DEBUG nova.scheduler.client.report [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1022.228881] env[62204]: INFO nova.compute.manager [-] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Took 1.59 seconds to deallocate network for instance. [ 1022.398379] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522f5bae-9ab6-81d6-8f9b-f23c0d5eb65b, 'name': SearchDatastore_Task, 'duration_secs': 0.009004} completed successfully. 
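Editor's note: the "Inventory has not changed" report above means the resource tracker compared the provider's current inventory dict against what placement already has and skipped the update. The usable capacity implied by that inventory follows (total - reserved) * allocation_ratio per resource class; the numbers below are copied from the report (min_unit/max_unit/step_size omitted), and the arithmetic is a worked example rather than Nova code:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%s: %.0f allocatable' % (rc, capacity))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400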
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.398617] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1022.398945] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d3be85d1-34b6-4b00-9740-c3abdb4b0734/d3be85d1-34b6-4b00-9740-c3abdb4b0734.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1022.399232] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7bb9caa-33f2-4549-90aa-a15ec7e7f47f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.405245] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1022.405245] env[62204]: value = "task-1200339" [ 1022.405245] env[62204]: _type = "Task" [ 1022.405245] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.413367] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200339, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.623450] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.269s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.623998] env[62204]: DEBUG nova.compute.manager [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1022.627007] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.892s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.737691] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.770319] env[62204]: DEBUG nova.network.neutron [-] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.915275] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200339, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.449791} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.915564] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] d3be85d1-34b6-4b00-9740-c3abdb4b0734/d3be85d1-34b6-4b00-9740-c3abdb4b0734.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1022.915857] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1022.916141] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-083b7bbd-6bae-461e-812a-fcc504f507ba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.923293] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1022.923293] env[62204]: value = "task-1200340" [ 1022.923293] env[62204]: _type = "Task" [ 1022.923293] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.931845] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200340, 'name': ExtendVirtualDisk_Task} progress is 0%. 
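Editor's note: after copying the sparse cached vmdk into the instance folder, the driver extends the root disk to the flavor size; the 1048576 passed to ExtendVirtualDisk_Task above is that size in KB (the m1.nano flavor logged later in this trace has root_gb=1, and 1 GiB = 1024 * 1024 KiB). The conversion, as a one-line check:

    def root_disk_size_kb(root_gb):
        # Flavor root_gb (GiB) expressed in KiB, the unit the extend task uses.
        return root_gb * 1024 * 1024

    assert root_disk_size_kb(1) == 1048576   # matches "Extending root virtual disk to 1048576"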
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.131448] env[62204]: DEBUG nova.compute.utils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1023.136111] env[62204]: DEBUG nova.compute.manager [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1023.137627] env[62204]: DEBUG nova.network.neutron [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1023.193030] env[62204]: DEBUG nova.policy [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b52997d8756d4096b3dcba62f0bd14b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e70013d6da84d2b9a0719621c9f2c1a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1023.273587] env[62204]: INFO nova.compute.manager [-] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Took 1.23 seconds to deallocate network for instance. 
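Editor's note: the policy line above shows a routine denial: the tempest user only carries the 'reader' and 'member' roles, so the network:attach_external_network check fails and port allocation proceeds on the tenant network. A toy stand-in for that check, assuming an admin-only default for the rule (real oslo.policy rules are expressions with scopes, not a single role string):

    def check_rule(rule, credentials, rules):
        """Return True if the caller's roles satisfy the (simplified) rule."""
        required_role = rules.get(rule)
        return required_role in credentials.get('roles', [])

    rules = {'network:attach_external_network': 'admin'}   # assumed default
    creds = {'roles': ['reader', 'member'],
             'project_id': '6e70013d6da84d2b9a0719621c9f2c1a'}
    print(check_rule('network:attach_external_network', creds, rules))   # False -> "Policy check ... failed"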
[ 1023.277715] env[62204]: DEBUG nova.compute.manager [req-68d9cffe-a9b9-491d-b060-82e372f94e10 req-bef99169-7dfc-492e-862e-ae36deb718f5 service nova] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Received event network-vif-deleted-b6783952-cb00-4f07-907c-d302559ad37d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1023.323729] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fb63e0-b7e7-426a-afed-89d4fda29e58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.331901] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08945a1-44b9-4b99-bfdb-e82e8d89f76b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.362367] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b009134-ecf6-4625-9e10-56c8478bf067 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.369613] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760d8c48-d895-4d2f-ad69-32852e0d8601 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.382511] env[62204]: DEBUG nova.compute.provider_tree [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.433219] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062041} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.433550] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1023.434437] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91e51f0-540c-475b-87bc-c77b53b8acac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.457180] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] d3be85d1-34b6-4b00-9740-c3abdb4b0734/d3be85d1-34b6-4b00-9740-c3abdb4b0734.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1023.457491] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-341014d9-026e-41c0-a412-72f50a2afa18 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.153718] env[62204]: DEBUG nova.network.neutron [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Successfully created port: 043fbcf5-77f4-4656-91a6-01f89dd81e08 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1024.156168] env[62204]: DEBUG nova.compute.manager [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1024.159172] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.159853] env[62204]: DEBUG nova.scheduler.client.report [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1024.169024] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1024.169024] env[62204]: value = "task-1200341" [ 1024.169024] env[62204]: _type = "Task" [ 1024.169024] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.177555] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200341, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.678970] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200341, 'name': ReconfigVM_Task, 'duration_secs': 0.276957} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.679300] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Reconfigured VM instance instance-00000068 to attach disk [datastore1] d3be85d1-34b6-4b00-9740-c3abdb4b0734/d3be85d1-34b6-4b00-9740-c3abdb4b0734.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1024.679871] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17006b3d-954c-40fe-9078-47cf8594c41f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.686924] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1024.686924] env[62204]: value = "task-1200342" [ 1024.686924] env[62204]: _type = "Task" [ 1024.686924] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.693866] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200342, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.780700] env[62204]: DEBUG oslo_concurrency.lockutils [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "6dc170a4-b08e-44b5-a152-832670e6866b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.781020] env[62204]: DEBUG oslo_concurrency.lockutils [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "6dc170a4-b08e-44b5-a152-832670e6866b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.781212] env[62204]: DEBUG oslo_concurrency.lockutils [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "6dc170a4-b08e-44b5-a152-832670e6866b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1024.781403] env[62204]: DEBUG oslo_concurrency.lockutils [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "6dc170a4-b08e-44b5-a152-832670e6866b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1024.781579] env[62204]: DEBUG oslo_concurrency.lockutils [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "6dc170a4-b08e-44b5-a152-832670e6866b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.784313] env[62204]: INFO nova.compute.manager [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Terminating instance [ 1024.786329] env[62204]: DEBUG nova.compute.manager [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1024.786519] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.787391] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3211f9-81da-44df-b465-d0b7f1d9e193 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.794485] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1024.794736] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5bdd985-e965-4c3e-9a24-48afe25b66db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.801424] env[62204]: DEBUG oslo_vmware.api [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 1024.801424] env[62204]: value = "task-1200343" [ 1024.801424] env[62204]: _type = "Task" [ 1024.801424] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.809514] env[62204]: DEBUG oslo_vmware.api [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200343, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.172313] env[62204]: DEBUG nova.compute.manager [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1025.175273] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.548s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.179302] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.030s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.196407] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200342, 'name': Rename_Task, 'duration_secs': 0.129324} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.199227] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1025.200025] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6e501da-eb41-419e-9803-66610a8d273f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.206273] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1025.206516] env[62204]: DEBUG nova.virt.hardware [None 
req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1025.207276] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1025.207628] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1025.207929] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1025.208238] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1025.208611] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1025.208912] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1025.209290] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1025.209614] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1025.209939] env[62204]: DEBUG nova.virt.hardware [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1025.211557] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2e7e69-d3f1-4f5f-bb23-45cd69f2b1e7 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.218450] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1025.218450] env[62204]: value = "task-1200344" [ 1025.218450] env[62204]: _type = "Task" [ 1025.218450] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.228678] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9ffcbc-5210-4d77-83da-2c1599cd7408 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.241237] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200344, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.312158] env[62204]: DEBUG oslo_vmware.api [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200343, 'name': PowerOffVM_Task, 'duration_secs': 0.20608} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.312481] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1025.312656] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1025.312908] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4471fe71-32f4-402c-bb8a-e9b42127f41c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.379440] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1025.379717] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1025.379927] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 
tempest-ServerRescueNegativeTestJSON-279322282-project-member] Deleting the datastore file [datastore2] 6dc170a4-b08e-44b5-a152-832670e6866b {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.380220] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e045dd0-c5e6-482f-af01-f05a1e4b2a32 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.386639] env[62204]: DEBUG oslo_vmware.api [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 1025.386639] env[62204]: value = "task-1200346" [ 1025.386639] env[62204]: _type = "Task" [ 1025.386639] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.393975] env[62204]: DEBUG oslo_vmware.api [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.688727] env[62204]: INFO nova.compute.claims [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.694396] env[62204]: DEBUG nova.compute.manager [req-0247a453-64cc-44f2-9617-cfc851d18860 req-161503b8-997a-4143-aa96-63ce2a02377a service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Received event network-vif-plugged-043fbcf5-77f4-4656-91a6-01f89dd81e08 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1025.694691] env[62204]: DEBUG oslo_concurrency.lockutils [req-0247a453-64cc-44f2-9617-cfc851d18860 req-161503b8-997a-4143-aa96-63ce2a02377a service nova] Acquiring lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.694996] env[62204]: DEBUG oslo_concurrency.lockutils [req-0247a453-64cc-44f2-9617-cfc851d18860 req-161503b8-997a-4143-aa96-63ce2a02377a service nova] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.695272] env[62204]: DEBUG oslo_concurrency.lockutils [req-0247a453-64cc-44f2-9617-cfc851d18860 req-161503b8-997a-4143-aa96-63ce2a02377a service nova] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.695529] env[62204]: DEBUG nova.compute.manager [req-0247a453-64cc-44f2-9617-cfc851d18860 req-161503b8-997a-4143-aa96-63ce2a02377a service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] No waiting events found dispatching network-vif-plugged-043fbcf5-77f4-4656-91a6-01f89dd81e08 
{{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1025.695786] env[62204]: WARNING nova.compute.manager [req-0247a453-64cc-44f2-9617-cfc851d18860 req-161503b8-997a-4143-aa96-63ce2a02377a service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Received unexpected event network-vif-plugged-043fbcf5-77f4-4656-91a6-01f89dd81e08 for instance with vm_state building and task_state spawning. [ 1025.731300] env[62204]: DEBUG oslo_vmware.api [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200344, 'name': PowerOnVM_Task, 'duration_secs': 0.447042} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.731706] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1025.732041] env[62204]: INFO nova.compute.manager [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Took 8.79 seconds to spawn the instance on the hypervisor. [ 1025.732325] env[62204]: DEBUG nova.compute.manager [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1025.733286] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68e3743-b8ca-4bfc-aa41-ce48159bbfbc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.753433] env[62204]: INFO nova.scheduler.client.report [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted allocation for migration d79ae017-636d-4eab-a832-79fdb917977d [ 1025.897325] env[62204]: DEBUG oslo_vmware.api [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170871} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.897605] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1025.897789] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1025.897966] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1025.898155] env[62204]: INFO nova.compute.manager [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1025.898399] env[62204]: DEBUG oslo.service.loopingcall [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1025.898589] env[62204]: DEBUG nova.compute.manager [-] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1025.898682] env[62204]: DEBUG nova.network.neutron [-] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1026.199087] env[62204]: INFO nova.compute.resource_tracker [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating resource usage from migration b845b6d3-628e-4d2f-927c-4121550b3604 [ 1026.219479] env[62204]: DEBUG nova.compute.manager [req-41d6e37a-ea43-4d4b-b45f-e75fa433a227 req-b101c4be-e729-428a-bfe0-c29bb641b778 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Received event network-vif-deleted-06dfadf2-c796-4fd2-a53f-55cb955837a9 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1026.219479] env[62204]: INFO nova.compute.manager [req-41d6e37a-ea43-4d4b-b45f-e75fa433a227 req-b101c4be-e729-428a-bfe0-c29bb641b778 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Neutron deleted interface 06dfadf2-c796-4fd2-a53f-55cb955837a9; detaching it from the instance and deleting it from the info cache [ 1026.219479] env[62204]: DEBUG nova.network.neutron [req-41d6e37a-ea43-4d4b-b45f-e75fa433a227 req-b101c4be-e729-428a-bfe0-c29bb641b778 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.251955] env[62204]: DEBUG nova.network.neutron [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Successfully updated port: 043fbcf5-77f4-4656-91a6-01f89dd81e08 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1026.255148] env[62204]: INFO nova.compute.manager [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Took 16.33 seconds to build instance. 
[ 1026.258243] env[62204]: DEBUG oslo_concurrency.lockutils [None req-aeda470c-42b8-4a4a-872d-1ba9012b7206 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.115s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.279555] env[62204]: DEBUG nova.compute.manager [req-654dc02d-f8ca-47ca-8020-015cdf2e6249 req-73d25cb4-4996-4b17-898e-202e8f3a1e97 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Received event network-changed-043fbcf5-77f4-4656-91a6-01f89dd81e08 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1026.279941] env[62204]: DEBUG nova.compute.manager [req-654dc02d-f8ca-47ca-8020-015cdf2e6249 req-73d25cb4-4996-4b17-898e-202e8f3a1e97 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Refreshing instance network info cache due to event network-changed-043fbcf5-77f4-4656-91a6-01f89dd81e08. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1026.280585] env[62204]: DEBUG oslo_concurrency.lockutils [req-654dc02d-f8ca-47ca-8020-015cdf2e6249 req-73d25cb4-4996-4b17-898e-202e8f3a1e97 service nova] Acquiring lock "refresh_cache-c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.280929] env[62204]: DEBUG oslo_concurrency.lockutils [req-654dc02d-f8ca-47ca-8020-015cdf2e6249 req-73d25cb4-4996-4b17-898e-202e8f3a1e97 service nova] Acquired lock "refresh_cache-c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.281174] env[62204]: DEBUG nova.network.neutron [req-654dc02d-f8ca-47ca-8020-015cdf2e6249 req-73d25cb4-4996-4b17-898e-202e8f3a1e97 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Refreshing network info cache for port 043fbcf5-77f4-4656-91a6-01f89dd81e08 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1026.386744] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5aa3e04-0e8e-43b0-9f9e-793e3b0ed6d2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.394338] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c635eceb-1b1a-4f64-91ba-63e3fc947b6a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.424576] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a77295eb-3b38-44a1-9523-39622e270e3c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.431808] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b11c426-c02f-4770-b9f7-72c293825435 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.445193] env[62204]: DEBUG nova.compute.provider_tree [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in 
ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.652500] env[62204]: DEBUG nova.network.neutron [-] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.724647] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ab6aba0-4327-4a16-8302-b7937a696355 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.733987] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8354496-42d2-4323-9862-0e37749c74bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.764808] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "refresh_cache-c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1026.765295] env[62204]: DEBUG oslo_concurrency.lockutils [None req-49467450-81c4-4584-9f5c-204d079c9b70 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.848s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.765832] env[62204]: DEBUG nova.compute.manager [req-41d6e37a-ea43-4d4b-b45f-e75fa433a227 req-b101c4be-e729-428a-bfe0-c29bb641b778 service nova] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Detach interface failed, port_id=06dfadf2-c796-4fd2-a53f-55cb955837a9, reason: Instance 6dc170a4-b08e-44b5-a152-832670e6866b could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1026.831168] env[62204]: DEBUG nova.network.neutron [req-654dc02d-f8ca-47ca-8020-015cdf2e6249 req-73d25cb4-4996-4b17-898e-202e8f3a1e97 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1026.947726] env[62204]: DEBUG nova.scheduler.client.report [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1026.962709] env[62204]: DEBUG nova.network.neutron [req-654dc02d-f8ca-47ca-8020-015cdf2e6249 req-73d25cb4-4996-4b17-898e-202e8f3a1e97 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.154482] env[62204]: INFO nova.compute.manager [-] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Took 1.26 seconds to deallocate network for instance. [ 1027.436891] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.437144] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.437369] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.437554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.437726] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.439878] env[62204]: INFO nova.compute.manager [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Terminating instance [ 1027.441588] env[62204]: DEBUG nova.compute.manager [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1027.441801] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.442634] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fdb8e1-96e5-481a-8022-3964cccab882 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.450366] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.450579] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31523f0c-0b49-40e5-9eaf-e8101fb3f77b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.453841] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.275s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.454047] env[62204]: INFO nova.compute.manager [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Migrating [ 1027.461244] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.788s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.462637] env[62204]: INFO nova.compute.claims [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.467219] env[62204]: DEBUG oslo_vmware.api [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd 
tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1027.467219] env[62204]: value = "task-1200347" [ 1027.467219] env[62204]: _type = "Task" [ 1027.467219] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.470357] env[62204]: DEBUG oslo_concurrency.lockutils [req-654dc02d-f8ca-47ca-8020-015cdf2e6249 req-73d25cb4-4996-4b17-898e-202e8f3a1e97 service nova] Releasing lock "refresh_cache-c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1027.476016] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "refresh_cache-c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.476217] env[62204]: DEBUG nova.network.neutron [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1027.486684] env[62204]: DEBUG oslo_vmware.api [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200347, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.542360] env[62204]: DEBUG oslo_concurrency.lockutils [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.542632] env[62204]: DEBUG oslo_concurrency.lockutils [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.542854] env[62204]: DEBUG oslo_concurrency.lockutils [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.543062] env[62204]: DEBUG oslo_concurrency.lockutils [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1027.543242] env[62204]: DEBUG oslo_concurrency.lockutils [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.545564] env[62204]: INFO nova.compute.manager [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Terminating instance [ 1027.547475] env[62204]: DEBUG nova.compute.manager [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1027.547669] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.548517] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f1348b-5503-4100-8bfb-2a4d27f6101d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.556345] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.556584] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02597c49-1a46-49a7-a693-ab10ef839fe3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.562867] env[62204]: DEBUG oslo_vmware.api [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1027.562867] env[62204]: value = "task-1200348" [ 1027.562867] env[62204]: _type = "Task" [ 1027.562867] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.570597] env[62204]: DEBUG oslo_vmware.api [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200348, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.661731] env[62204]: DEBUG oslo_concurrency.lockutils [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1027.979859] env[62204]: DEBUG oslo_vmware.api [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200347, 'name': PowerOffVM_Task, 'duration_secs': 0.167016} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.980727] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1027.980911] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1027.981174] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97b8714c-0e2e-484e-a64d-64e99a8cd340 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.984860] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1027.985081] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.985245] env[62204]: DEBUG nova.network.neutron [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1028.017025] env[62204]: DEBUG nova.network.neutron [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1028.047464] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.047707] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.047913] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleting the datastore file [datastore1] 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.048213] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19d49d88-ecce-45de-b1f0-ec232a842bd8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.054448] env[62204]: DEBUG oslo_vmware.api [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1028.054448] env[62204]: value = "task-1200350" [ 1028.054448] env[62204]: _type = "Task" [ 1028.054448] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.064579] env[62204]: DEBUG oslo_vmware.api [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200350, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.071850] env[62204]: DEBUG oslo_vmware.api [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200348, 'name': PowerOffVM_Task, 'duration_secs': 0.274909} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.072122] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1028.072289] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.072538] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98338535-9765-43b9-a787-60fa4d5959d5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.159576] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.159892] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.160169] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleting the datastore file [datastore1] d3be85d1-34b6-4b00-9740-c3abdb4b0734 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.160533] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5beb71b0-3c47-4b31-a112-7ca7648fd436 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.166586] env[62204]: DEBUG oslo_vmware.api [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for the task: (returnval){ [ 1028.166586] env[62204]: value = "task-1200352" [ 1028.166586] env[62204]: _type = "Task" [ 1028.166586] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.175224] env[62204]: DEBUG oslo_vmware.api [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200352, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.236506] env[62204]: DEBUG nova.network.neutron [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Updating instance_info_cache with network_info: [{"id": "043fbcf5-77f4-4656-91a6-01f89dd81e08", "address": "fa:16:3e:86:0b:29", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043fbcf5-77", "ovs_interfaceid": "043fbcf5-77f4-4656-91a6-01f89dd81e08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.564137] env[62204]: DEBUG oslo_vmware.api [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151421} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.566356] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.566559] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.566744] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.566941] env[62204]: INFO nova.compute.manager [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1028.567209] env[62204]: DEBUG oslo.service.loopingcall [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.568207] env[62204]: DEBUG nova.compute.manager [-] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1028.568207] env[62204]: DEBUG nova.network.neutron [-] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1028.639846] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96eb9ff2-7df5-4a64-9e48-c9ac902ca5bb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.647127] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6f5c97-eb17-47a6-b5dc-655e8cafb30f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.680890] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900676a4-feb2-4e97-ba2d-57368b25d05c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.691571] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21b3ef5-32d1-4901-9b2a-dd3b82cf5c4b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.695162] env[62204]: DEBUG oslo_vmware.api [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Task: {'id': task-1200352, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150328} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.695409] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.695599] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.695764] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.695939] env[62204]: INFO nova.compute.manager [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1028.696192] env[62204]: DEBUG oslo.service.loopingcall [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.696728] env[62204]: DEBUG nova.compute.manager [-] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1028.696912] env[62204]: DEBUG nova.network.neutron [-] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1028.706038] env[62204]: DEBUG nova.compute.provider_tree [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.739494] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "refresh_cache-c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1028.739781] env[62204]: DEBUG nova.compute.manager [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Instance network_info: |[{"id": "043fbcf5-77f4-4656-91a6-01f89dd81e08", "address": "fa:16:3e:86:0b:29", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043fbcf5-77", "ovs_interfaceid": "043fbcf5-77f4-4656-91a6-01f89dd81e08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1028.740266] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:0b:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '043fbcf5-77f4-4656-91a6-01f89dd81e08', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1028.748033] env[62204]: DEBUG 
oslo.service.loopingcall [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1028.748542] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1028.748787] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ecdc665-e001-4110-8103-c63a006cf829 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.765487] env[62204]: DEBUG nova.network.neutron [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance_info_cache with network_info: [{"id": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "address": "fa:16:3e:8d:31:3c", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7dcd5c1-45", "ovs_interfaceid": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.771275] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1028.771275] env[62204]: value = "task-1200353" [ 1028.771275] env[62204]: _type = "Task" [ 1028.771275] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.780802] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200353, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.958047] env[62204]: DEBUG nova.compute.manager [req-c2cec0c0-c56c-470a-9019-b2ee754bb51e req-467ea46e-259b-44c4-99fb-66e84d8daf44 service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Received event network-vif-deleted-7cd482c2-c3eb-4a81-934b-4c959a24a664 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1028.958306] env[62204]: INFO nova.compute.manager [req-c2cec0c0-c56c-470a-9019-b2ee754bb51e req-467ea46e-259b-44c4-99fb-66e84d8daf44 service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Neutron deleted interface 7cd482c2-c3eb-4a81-934b-4c959a24a664; detaching it from the instance and deleting it from the info cache [ 1028.958552] env[62204]: DEBUG nova.network.neutron [req-c2cec0c0-c56c-470a-9019-b2ee754bb51e req-467ea46e-259b-44c4-99fb-66e84d8daf44 service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.061846] env[62204]: DEBUG nova.compute.manager [req-21d0e2c8-2268-4aee-873d-18f85f050417 req-4c7eee6c-5308-4ea5-ac50-a9a409a0ec39 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Received event network-vif-deleted-4e81e820-357d-4b7e-900f-aaac4c7c2798 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1029.062128] env[62204]: INFO nova.compute.manager [req-21d0e2c8-2268-4aee-873d-18f85f050417 req-4c7eee6c-5308-4ea5-ac50-a9a409a0ec39 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Neutron deleted interface 4e81e820-357d-4b7e-900f-aaac4c7c2798; detaching it from the instance and deleting it from the info cache [ 1029.062238] env[62204]: DEBUG nova.network.neutron [req-21d0e2c8-2268-4aee-873d-18f85f050417 req-4c7eee6c-5308-4ea5-ac50-a9a409a0ec39 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.208617] env[62204]: DEBUG nova.scheduler.client.report [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1029.268682] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.281114] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200353, 'name': CreateVM_Task, 'duration_secs': 0.326983} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.281283] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1029.282501] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.282671] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.282993] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1029.283514] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99442ce2-4c66-482c-97f2-5da173c7b68d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.288358] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1029.288358] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52681f08-28ae-1814-4365-5a6847d67177" [ 1029.288358] env[62204]: _type = "Task" [ 1029.288358] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.297493] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52681f08-28ae-1814-4365-5a6847d67177, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.433536] env[62204]: DEBUG nova.network.neutron [-] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.461370] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3cca32f9-4c27-410e-a252-3a0910da44c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.472078] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d97bf3-e3c2-4303-8781-becb365541bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.499836] env[62204]: DEBUG nova.compute.manager [req-c2cec0c0-c56c-470a-9019-b2ee754bb51e req-467ea46e-259b-44c4-99fb-66e84d8daf44 service nova] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Detach interface failed, port_id=7cd482c2-c3eb-4a81-934b-4c959a24a664, reason: Instance d3be85d1-34b6-4b00-9740-c3abdb4b0734 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1029.536131] env[62204]: DEBUG nova.network.neutron [-] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.565475] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8228272a-a8f9-487f-94e0-3669ee4a6274 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.573539] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a8199f-08a6-47c1-a769-3a24985ea3f4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.602513] env[62204]: DEBUG nova.compute.manager [req-21d0e2c8-2268-4aee-873d-18f85f050417 req-4c7eee6c-5308-4ea5-ac50-a9a409a0ec39 service nova] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Detach interface failed, port_id=4e81e820-357d-4b7e-900f-aaac4c7c2798, reason: Instance 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1029.714125] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.714677] env[62204]: DEBUG nova.compute.manager [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1029.717889] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.980s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.718072] env[62204]: DEBUG nova.objects.instance [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lazy-loading 'resources' on Instance uuid 21056adb-d81e-45bd-b354-1bcb488d2ed9 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1029.799473] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52681f08-28ae-1814-4365-5a6847d67177, 'name': SearchDatastore_Task, 'duration_secs': 0.0096} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.799831] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.800097] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1029.800349] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1029.800544] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.800715] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1029.800932] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d0fa27f-f737-4f0a-ad95-5e01b35da143 {{(pid=62204) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.810152] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1029.810152] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1029.810873] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6db2cd7-a025-46be-9bc6-80c4132e1632 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.816099] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1029.816099] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520f516f-bd7b-c822-2796-7eaf346c6f85" [ 1029.816099] env[62204]: _type = "Task" [ 1029.816099] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.823625] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520f516f-bd7b-c822-2796-7eaf346c6f85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.937040] env[62204]: INFO nova.compute.manager [-] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Took 1.24 seconds to deallocate network for instance. [ 1030.038701] env[62204]: INFO nova.compute.manager [-] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Took 1.47 seconds to deallocate network for instance. [ 1030.220916] env[62204]: DEBUG nova.compute.utils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1030.225917] env[62204]: DEBUG nova.compute.manager [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1030.225917] env[62204]: DEBUG nova.network.neutron [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1030.275313] env[62204]: DEBUG nova.policy [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6054f141cad7421f85bbb5944f408070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6907df6f17b142c0b4881f15f3b88a9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1030.331188] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]520f516f-bd7b-c822-2796-7eaf346c6f85, 'name': SearchDatastore_Task, 'duration_secs': 0.011069} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.335374] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad6e50d2-50ce-4cb9-810a-e4bca61b24fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.340612] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1030.340612] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c924ce-288c-5db0-d990-35d5df245018" [ 1030.340612] env[62204]: _type = "Task" [ 1030.340612] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.350158] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c924ce-288c-5db0-d990-35d5df245018, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.414343] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f3e72a-e2d3-4faa-bed3-e7a3f3ab96c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.422485] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c3d404-7d73-421c-b134-05943addccc2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.452991] env[62204]: DEBUG oslo_concurrency.lockutils [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.453297] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b8ee92-9410-47c7-bbfa-e01ee6c9c608 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.461065] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e704a28b-b396-4c17-a5e2-331f4bf64884 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.473488] env[62204]: DEBUG nova.compute.provider_tree [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.545879] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.564850] env[62204]: DEBUG nova.network.neutron [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Successfully created port: 765c298f-6dcb-4b6e-8b1b-f62c31fd21de {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1030.726443] env[62204]: DEBUG nova.compute.manager [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1030.786047] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b214a6-823a-4c4e-af66-e82a7c60092e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.807043] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance 'a93880fc-e517-4d83-98c1-9ce2405bf9d5' progress to 0 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1030.851236] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c924ce-288c-5db0-d990-35d5df245018, 'name': SearchDatastore_Task, 'duration_secs': 0.012153} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.851720] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.851986] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba/c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1030.852267] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0a93c4f-6184-45c8-a998-81160f8cbca9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.859947] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1030.859947] env[62204]: value = "task-1200354" [ 1030.859947] env[62204]: _type = "Task" [ 1030.859947] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.867931] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200354, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.976444] env[62204]: DEBUG nova.scheduler.client.report [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1031.313528] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.313946] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5475b92-ca11-49da-ad11-05a1cf1fa5ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.321797] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1031.321797] env[62204]: value = "task-1200355" [ 1031.321797] env[62204]: _type = "Task" [ 1031.321797] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.331568] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200355, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.369955] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200354, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.481701] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.484153] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.325s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.484439] env[62204]: DEBUG nova.objects.instance [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lazy-loading 'resources' on Instance uuid 032bbedb-7663-45a3-b2d0-37570d38f573 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.505729] env[62204]: INFO nova.scheduler.client.report [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Deleted allocations for instance 21056adb-d81e-45bd-b354-1bcb488d2ed9 [ 1031.737048] env[62204]: DEBUG nova.compute.manager [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1031.764162] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1031.764482] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1031.764653] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1031.764849] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1031.765052] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1031.765154] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1031.765381] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1031.765547] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1031.765719] env[62204]: DEBUG nova.virt.hardware [None 
req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1031.765894] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1031.766494] env[62204]: DEBUG nova.virt.hardware [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1031.766981] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c6ae25-d3a4-4692-ad9f-d6d8c9ddcde7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.775128] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd946f3-fe9a-4587-aafe-69d6b2938307 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.830640] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200355, 'name': PowerOffVM_Task, 'duration_secs': 0.246891} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.830924] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.831136] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance 'a93880fc-e517-4d83-98c1-9ce2405bf9d5' progress to 17 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1031.869283] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200354, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520908} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.869551] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba/c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1031.869762] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1031.870696] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5538e03c-82e9-427c-8f76-75675ccc6669 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.877247] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1031.877247] env[62204]: value = "task-1200356" [ 1031.877247] env[62204]: _type = "Task" [ 1031.877247] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.886267] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200356, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.950802] env[62204]: DEBUG nova.compute.manager [req-1f49c2fa-b408-43be-9856-dfb65a0cc566 req-2014c678-fdbe-4c85-83be-79910ffbe327 service nova] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Received event network-vif-plugged-765c298f-6dcb-4b6e-8b1b-f62c31fd21de {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1031.950802] env[62204]: DEBUG oslo_concurrency.lockutils [req-1f49c2fa-b408-43be-9856-dfb65a0cc566 req-2014c678-fdbe-4c85-83be-79910ffbe327 service nova] Acquiring lock "8081d981-42c4-46e4-82e7-2f8b59a68465-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1031.950802] env[62204]: DEBUG oslo_concurrency.lockutils [req-1f49c2fa-b408-43be-9856-dfb65a0cc566 req-2014c678-fdbe-4c85-83be-79910ffbe327 service nova] Lock "8081d981-42c4-46e4-82e7-2f8b59a68465-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.950960] env[62204]: DEBUG oslo_concurrency.lockutils [req-1f49c2fa-b408-43be-9856-dfb65a0cc566 req-2014c678-fdbe-4c85-83be-79910ffbe327 service nova] Lock "8081d981-42c4-46e4-82e7-2f8b59a68465-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.951013] env[62204]: DEBUG nova.compute.manager [req-1f49c2fa-b408-43be-9856-dfb65a0cc566 req-2014c678-fdbe-4c85-83be-79910ffbe327 service nova] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] No waiting events found dispatching network-vif-plugged-765c298f-6dcb-4b6e-8b1b-f62c31fd21de {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1031.951186] env[62204]: WARNING nova.compute.manager [req-1f49c2fa-b408-43be-9856-dfb65a0cc566 req-2014c678-fdbe-4c85-83be-79910ffbe327 service nova] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Received unexpected event network-vif-plugged-765c298f-6dcb-4b6e-8b1b-f62c31fd21de for instance with vm_state building and task_state spawning. 
[ 1032.012587] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d58c5901-85f1-4666-9159-cc5357310049 tempest-AttachVolumeTestJSON-1660270746 tempest-AttachVolumeTestJSON-1660270746-project-member] Lock "21056adb-d81e-45bd-b354-1bcb488d2ed9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 12.500s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.049384] env[62204]: DEBUG nova.network.neutron [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Successfully updated port: 765c298f-6dcb-4b6e-8b1b-f62c31fd21de {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1032.160016] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ee58ee-14db-4145-b9e6-ce2f5cfe10a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.168049] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5def5db9-690a-4122-ad13-deab7a3a1941 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.199354] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b94f3d8-a9a5-4f25-8570-b986a3826a60 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.206317] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660ee3b4-543b-4bca-b734-29020e10c0f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.219733] env[62204]: DEBUG nova.compute.provider_tree [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.340043] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1032.340385] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1032.340463] 
env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.340639] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1032.340790] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.341017] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1032.341246] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1032.341413] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1032.341581] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1032.341745] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1032.341918] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1032.347077] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00a675e4-ce77-4a29-b6ab-fa6a476038a0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.364400] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: 
(returnval){ [ 1032.364400] env[62204]: value = "task-1200357" [ 1032.364400] env[62204]: _type = "Task" [ 1032.364400] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.372222] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.387585] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200356, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068549} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.387909] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1032.388671] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2832d570-5a57-421c-a0c5-5a6fde5ce44e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.411359] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba/c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1032.411645] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7246ec2a-d86f-415b-a216-ecb4b9924722 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.431560] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.431822] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.432008] env[62204]: INFO nova.compute.manager [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 
tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Shelving [ 1032.433508] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1032.433508] env[62204]: value = "task-1200358" [ 1032.433508] env[62204]: _type = "Task" [ 1032.433508] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.441912] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200358, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.551898] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-8081d981-42c4-46e4-82e7-2f8b59a68465" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.553061] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-8081d981-42c4-46e4-82e7-2f8b59a68465" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.553061] env[62204]: DEBUG nova.network.neutron [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1032.723059] env[62204]: DEBUG nova.scheduler.client.report [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1032.875430] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200357, 'name': ReconfigVM_Task, 'duration_secs': 0.232427} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.875769] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance 'a93880fc-e517-4d83-98c1-9ce2405bf9d5' progress to 33 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1032.944202] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1032.944506] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a94716c-ca88-4ccd-b1ba-cb64de68cbc1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.949242] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200358, 'name': ReconfigVM_Task, 'duration_secs': 0.28282} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.949826] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Reconfigured VM instance instance-00000069 to attach disk [datastore1] c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba/c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.950468] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-29160fc3-755f-4fb1-a7fa-90bed2a9ef22 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.954067] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1032.954067] env[62204]: value = "task-1200360" [ 1032.954067] env[62204]: _type = "Task" [ 1032.954067] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.957662] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1032.957662] env[62204]: value = "task-1200361" [ 1032.957662] env[62204]: _type = "Task" [ 1032.957662] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.965203] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200360, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.969821] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200361, 'name': Rename_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.088227] env[62204]: DEBUG nova.network.neutron [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1033.224195] env[62204]: DEBUG nova.network.neutron [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Updating instance_info_cache with network_info: [{"id": "765c298f-6dcb-4b6e-8b1b-f62c31fd21de", "address": "fa:16:3e:fd:47:52", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap765c298f-6d", "ovs_interfaceid": "765c298f-6dcb-4b6e-8b1b-f62c31fd21de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.227616] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.743s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.229873] env[62204]: DEBUG oslo_concurrency.lockutils [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.568s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.230128] env[62204]: DEBUG nova.objects.instance [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lazy-loading 'resources' on Instance uuid 6dc170a4-b08e-44b5-a152-832670e6866b {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.253840] env[62204]: INFO nova.scheduler.client.report [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted allocations for instance 032bbedb-7663-45a3-b2d0-37570d38f573 [ 1033.383070] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1033.383459] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1033.383459] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.383620] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1033.383785] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.383942] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1033.384168] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1033.384328] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1033.384494] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1033.384657] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1033.384873] env[62204]: DEBUG nova.virt.hardware [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1033.390056] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1033.390338] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a7cac2b-9879-4199-a2d0-0799453a3c13 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.408041] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1033.408041] env[62204]: value = "task-1200362" [ 1033.408041] env[62204]: _type = "Task" [ 1033.408041] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.415680] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200362, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.466827] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200361, 'name': Rename_Task, 'duration_secs': 0.132619} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.469724] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.470015] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200360, 'name': PowerOffVM_Task, 'duration_secs': 0.178708} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.470237] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2846e42-bac8-4a51-aa92-7dbfb27ee1d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.471689] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1033.472439] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c98c102-6878-4725-9196-69a677a18630 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.491859] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d89e939-9e81-49b6-af24-5e767c1190a1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.494518] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1033.494518] env[62204]: value = "task-1200363" [ 1033.494518] env[62204]: _type = "Task" [ 1033.494518] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.506024] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200363, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.726933] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-8081d981-42c4-46e4-82e7-2f8b59a68465" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.727404] env[62204]: DEBUG nova.compute.manager [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Instance network_info: |[{"id": "765c298f-6dcb-4b6e-8b1b-f62c31fd21de", "address": "fa:16:3e:fd:47:52", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap765c298f-6d", "ovs_interfaceid": "765c298f-6dcb-4b6e-8b1b-f62c31fd21de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1033.727891] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:47:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '765c298f-6dcb-4b6e-8b1b-f62c31fd21de', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.735259] env[62204]: DEBUG oslo.service.loopingcall [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1033.737980] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.738744] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d26bf42d-8e71-443e-9b0b-15e7bd435d9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.762930] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.762930] env[62204]: value = "task-1200364" [ 1033.762930] env[62204]: _type = "Task" [ 1033.762930] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.762930] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e4d05a68-5d74-43bb-88a3-51fd637590e9 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "032bbedb-7663-45a3-b2d0-37570d38f573" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.847s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.774062] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200364, 'name': CreateVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.877049] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c497d316-31ac-4a8a-9bb0-a710e4fbff8c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.885930] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cdce2a-cb96-4c76-b54c-95e76cb41763 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.919963] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e736e87d-c985-42b9-a9f9-09e56e190e05 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.929496] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad978fb-b660-4bd5-b047-51845dfd3abb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.933376] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200362, 'name': ReconfigVM_Task, 'duration_secs': 0.206735} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.933647] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1033.934779] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fc369c-319b-43ab-abf5-8d286d134ea9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.944769] env[62204]: DEBUG nova.compute.provider_tree [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1033.967869] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] a93880fc-e517-4d83-98c1-9ce2405bf9d5/a93880fc-e517-4d83-98c1-9ce2405bf9d5.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1033.968868] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4dd25b44-3cf1-4ca6-bcec-2fa369983e62 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.983517] env[62204]: DEBUG nova.compute.manager [req-a5af1fbe-c0e7-4b70-89a7-a1a44623f675 req-f27170a7-396d-4e14-be45-fd9803c20624 service nova] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Received event network-changed-765c298f-6dcb-4b6e-8b1b-f62c31fd21de {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1033.983721] env[62204]: DEBUG nova.compute.manager [req-a5af1fbe-c0e7-4b70-89a7-a1a44623f675 req-f27170a7-396d-4e14-be45-fd9803c20624 service nova] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Refreshing instance network info cache due to event network-changed-765c298f-6dcb-4b6e-8b1b-f62c31fd21de. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1033.983930] env[62204]: DEBUG oslo_concurrency.lockutils [req-a5af1fbe-c0e7-4b70-89a7-a1a44623f675 req-f27170a7-396d-4e14-be45-fd9803c20624 service nova] Acquiring lock "refresh_cache-8081d981-42c4-46e4-82e7-2f8b59a68465" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.984091] env[62204]: DEBUG oslo_concurrency.lockutils [req-a5af1fbe-c0e7-4b70-89a7-a1a44623f675 req-f27170a7-396d-4e14-be45-fd9803c20624 service nova] Acquired lock "refresh_cache-8081d981-42c4-46e4-82e7-2f8b59a68465" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.984258] env[62204]: DEBUG nova.network.neutron [req-a5af1fbe-c0e7-4b70-89a7-a1a44623f675 req-f27170a7-396d-4e14-be45-fd9803c20624 service nova] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Refreshing network info cache for port 765c298f-6dcb-4b6e-8b1b-f62c31fd21de {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1033.992075] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1033.992075] env[62204]: value = "task-1200365" [ 1033.992075] env[62204]: _type = "Task" [ 1033.992075] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.006186] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1034.006926] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200365, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.009991] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2ad1970f-7a77-46bc-aa64-fcd64ee4b280 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.012240] env[62204]: DEBUG oslo_vmware.api [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200363, 'name': PowerOnVM_Task, 'duration_secs': 0.519577} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.012820] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1034.013071] env[62204]: INFO nova.compute.manager [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Took 8.84 seconds to spawn the instance on the hypervisor. [ 1034.013261] env[62204]: DEBUG nova.compute.manager [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1034.014390] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f27b5d-8c62-4288-b74a-e7c73f73fc79 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.018100] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1034.018100] env[62204]: value = "task-1200366" [ 1034.018100] env[62204]: _type = "Task" [ 1034.018100] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.033801] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200366, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.138409] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "ade509d8-5d7c-4926-bb2f-067dce84f76c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.138753] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "ade509d8-5d7c-4926-bb2f-067dce84f76c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.138986] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "ade509d8-5d7c-4926-bb2f-067dce84f76c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1034.139197] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "ade509d8-5d7c-4926-bb2f-067dce84f76c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.139373] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "ade509d8-5d7c-4926-bb2f-067dce84f76c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.141891] env[62204]: INFO nova.compute.manager [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Terminating instance [ 1034.143958] env[62204]: DEBUG nova.compute.manager [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1034.144194] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1034.145097] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91354bc-5d3a-4963-8fc8-a79e364cb034 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.152604] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1034.152817] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-138c3e94-912e-4c39-af2d-eebb7af02ba7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.159507] env[62204]: DEBUG oslo_vmware.api [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1034.159507] env[62204]: value = "task-1200367" [ 1034.159507] env[62204]: _type = "Task" [ 1034.159507] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.167414] env[62204]: DEBUG oslo_vmware.api [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200367, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.273923] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200364, 'name': CreateVM_Task, 'duration_secs': 0.390906} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.274149] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.274909] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.275087] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.275465] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1034.275751] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36c013a1-f264-438c-b20d-8150636f31d2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.281596] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1034.281596] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526ab07b-20f9-a997-ef4c-03308153dbd6" [ 1034.281596] env[62204]: _type = "Task" [ 1034.281596] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.290286] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526ab07b-20f9-a997-ef4c-03308153dbd6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.447797] env[62204]: DEBUG nova.scheduler.client.report [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1034.502106] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200365, 'name': ReconfigVM_Task, 'duration_secs': 0.327987} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.502405] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfigured VM instance instance-00000060 to attach disk [datastore2] a93880fc-e517-4d83-98c1-9ce2405bf9d5/a93880fc-e517-4d83-98c1-9ce2405bf9d5.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1034.502702] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance 'a93880fc-e517-4d83-98c1-9ce2405bf9d5' progress to 50 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1034.527034] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200366, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.535284] env[62204]: INFO nova.compute.manager [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Took 17.92 seconds to build instance. [ 1034.668947] env[62204]: DEBUG oslo_vmware.api [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200367, 'name': PowerOffVM_Task, 'duration_secs': 0.201788} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.669212] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1034.669386] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1034.669635] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f67c380-15e9-4d28-ac7f-94acc8dba358 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.693924] env[62204]: DEBUG nova.network.neutron [req-a5af1fbe-c0e7-4b70-89a7-a1a44623f675 req-f27170a7-396d-4e14-be45-fd9803c20624 service nova] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Updated VIF entry in instance network info cache for port 765c298f-6dcb-4b6e-8b1b-f62c31fd21de. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1034.694356] env[62204]: DEBUG nova.network.neutron [req-a5af1fbe-c0e7-4b70-89a7-a1a44623f675 req-f27170a7-396d-4e14-be45-fd9803c20624 service nova] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Updating instance_info_cache with network_info: [{"id": "765c298f-6dcb-4b6e-8b1b-f62c31fd21de", "address": "fa:16:3e:fd:47:52", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap765c298f-6d", "ovs_interfaceid": "765c298f-6dcb-4b6e-8b1b-f62c31fd21de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.734700] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1034.735053] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] 
Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1034.735327] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleting the datastore file [datastore2] ade509d8-5d7c-4926-bb2f-067dce84f76c {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1034.735653] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8c664ce-d2d1-49e5-b1e5-1202a5a3cb26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.745783] env[62204]: DEBUG oslo_vmware.api [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1034.745783] env[62204]: value = "task-1200369" [ 1034.745783] env[62204]: _type = "Task" [ 1034.745783] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.754298] env[62204]: DEBUG oslo_vmware.api [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200369, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.794350] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]526ab07b-20f9-a997-ef4c-03308153dbd6, 'name': SearchDatastore_Task, 'duration_secs': 0.012472} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.794698] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.794970] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.795247] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.795395] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.795576] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.795839] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b6b4348-2914-4ef5-ba10-ad26776c5780 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.811606] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.811849] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.812669] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b1ca469-6d15-42f7-9127-258b223a57db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.818101] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1034.818101] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa2d4e-c866-671c-ee03-42b3949e7890" [ 1034.818101] env[62204]: _type = "Task" [ 1034.818101] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.825616] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa2d4e-c866-671c-ee03-42b3949e7890, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.953051] env[62204]: DEBUG oslo_concurrency.lockutils [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.723s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1034.955634] env[62204]: DEBUG oslo_concurrency.lockutils [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.503s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.956259] env[62204]: DEBUG nova.objects.instance [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lazy-loading 'resources' on Instance uuid d3be85d1-34b6-4b00-9740-c3abdb4b0734 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.977363] env[62204]: INFO nova.scheduler.client.report [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Deleted allocations for instance 6dc170a4-b08e-44b5-a152-832670e6866b [ 1035.010059] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f875e8-5e52-4c11-b872-e888f8d9e09a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.037394] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff63251-2838-4d75-945d-bfd34282a443 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.040705] env[62204]: DEBUG oslo_concurrency.lockutils [None req-48b2fb25-3adc-44a5-bbed-bb78536f231c tempest-AttachVolumeNegativeTest-305032227 
tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.435s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.045771] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200366, 'name': CreateSnapshot_Task, 'duration_secs': 0.760659} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.063263] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1035.063948] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance 'a93880fc-e517-4d83-98c1-9ce2405bf9d5' progress to 67 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1035.068756] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a11f78-75cd-494e-95d1-2103a039e925 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.196938] env[62204]: DEBUG oslo_concurrency.lockutils [req-a5af1fbe-c0e7-4b70-89a7-a1a44623f675 req-f27170a7-396d-4e14-be45-fd9803c20624 service nova] Releasing lock "refresh_cache-8081d981-42c4-46e4-82e7-2f8b59a68465" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.256400] env[62204]: DEBUG oslo_vmware.api [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200369, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287988} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.256678] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1035.256867] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1035.257144] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1035.257330] env[62204]: INFO nova.compute.manager [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1035.257580] env[62204]: DEBUG oslo.service.loopingcall [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1035.257780] env[62204]: DEBUG nova.compute.manager [-] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1035.257876] env[62204]: DEBUG nova.network.neutron [-] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1035.327785] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa2d4e-c866-671c-ee03-42b3949e7890, 'name': SearchDatastore_Task, 'duration_secs': 0.009555} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.328886] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c84cfd4f-02e2-49bd-9e8c-e05db589e045 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.333863] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1035.333863] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52621ff0-7eae-845e-ef10-4301687606bb" [ 1035.333863] env[62204]: _type = "Task" [ 1035.333863] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.341121] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52621ff0-7eae-845e-ef10-4301687606bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.495022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-388c2320-dd0f-4dd3-b8ab-acd4a91adb11 tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "6dc170a4-b08e-44b5-a152-832670e6866b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.711s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.586673] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1035.589169] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-baabad87-0e0e-4228-9a63-577ca7389959 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.597849] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1035.597849] env[62204]: value = "task-1200370" [ 1035.597849] env[62204]: _type = "Task" [ 1035.597849] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.608206] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200370, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.613148] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cee699-caec-4412-85a1-998e65de6331 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.620221] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa859aa6-837d-45a8-a6f1-74b417a373e8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.652879] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6091e2-0cc2-4d03-8e70-33f28e1f6189 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.661597] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b52e592-d01c-4a0c-859c-67533b8d68e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.676697] env[62204]: DEBUG nova.compute.provider_tree [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1035.844995] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52621ff0-7eae-845e-ef10-4301687606bb, 'name': SearchDatastore_Task, 'duration_secs': 0.025829} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.845746] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.845746] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 8081d981-42c4-46e4-82e7-2f8b59a68465/8081d981-42c4-46e4-82e7-2f8b59a68465.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1035.845900] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-000371f8-5770-4353-bcd2-875d1ede82e1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.853014] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1035.853014] env[62204]: value = "task-1200371" [ 1035.853014] env[62204]: _type = "Task" [ 1035.853014] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.860830] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200371, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.996706] env[62204]: DEBUG nova.network.neutron [-] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.010129] env[62204]: DEBUG nova.compute.manager [req-87f8ac87-2f2d-4751-b2ba-fb37dcccf6f1 req-72871989-d83f-4447-b01a-603329118cfe service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Received event network-vif-deleted-aa247f33-6bdf-4e2e-b1aa-fbd26891eff4 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1036.010129] env[62204]: INFO nova.compute.manager [req-87f8ac87-2f2d-4751-b2ba-fb37dcccf6f1 req-72871989-d83f-4447-b01a-603329118cfe service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Neutron deleted interface aa247f33-6bdf-4e2e-b1aa-fbd26891eff4; detaching it from the instance and deleting it from the info cache [ 1036.010129] env[62204]: DEBUG nova.network.neutron [req-87f8ac87-2f2d-4751-b2ba-fb37dcccf6f1 req-72871989-d83f-4447-b01a-603329118cfe service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.083151] env[62204]: DEBUG nova.compute.manager [req-1cf6d5ca-985e-4eb6-bff2-ed16984e9870 req-70067792-deab-4a86-8eed-dc34f870b561 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Received event network-changed-043fbcf5-77f4-4656-91a6-01f89dd81e08 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1036.083416] env[62204]: DEBUG nova.compute.manager [req-1cf6d5ca-985e-4eb6-bff2-ed16984e9870 req-70067792-deab-4a86-8eed-dc34f870b561 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Refreshing instance network info cache due to event network-changed-043fbcf5-77f4-4656-91a6-01f89dd81e08. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1036.083590] env[62204]: DEBUG oslo_concurrency.lockutils [req-1cf6d5ca-985e-4eb6-bff2-ed16984e9870 req-70067792-deab-4a86-8eed-dc34f870b561 service nova] Acquiring lock "refresh_cache-c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.083754] env[62204]: DEBUG oslo_concurrency.lockutils [req-1cf6d5ca-985e-4eb6-bff2-ed16984e9870 req-70067792-deab-4a86-8eed-dc34f870b561 service nova] Acquired lock "refresh_cache-c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.083927] env[62204]: DEBUG nova.network.neutron [req-1cf6d5ca-985e-4eb6-bff2-ed16984e9870 req-70067792-deab-4a86-8eed-dc34f870b561 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Refreshing network info cache for port 043fbcf5-77f4-4656-91a6-01f89dd81e08 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1036.109710] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200370, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.181703] env[62204]: DEBUG nova.scheduler.client.report [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1036.364151] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200371, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502382} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.364441] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 8081d981-42c4-46e4-82e7-2f8b59a68465/8081d981-42c4-46e4-82e7-2f8b59a68465.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1036.364663] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.364954] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba3afd6e-84c0-4501-af12-6738e7e43e65 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.370469] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1036.370469] env[62204]: value = "task-1200373" [ 1036.370469] env[62204]: _type = "Task" [ 1036.370469] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.378131] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200373, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.480599] env[62204]: DEBUG oslo_concurrency.lockutils [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.480753] env[62204]: DEBUG oslo_concurrency.lockutils [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.481076] env[62204]: DEBUG oslo_concurrency.lockutils [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.481336] env[62204]: DEBUG oslo_concurrency.lockutils [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.481563] env[62204]: DEBUG oslo_concurrency.lockutils [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.484208] env[62204]: INFO nova.compute.manager [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Terminating instance [ 1036.486298] env[62204]: DEBUG nova.compute.manager [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1036.486526] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.487724] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7709ac-e255-4817-af1e-9ca359b2851d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.495335] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.495591] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1d6d2b5-f01a-41c9-93b7-c889c1fecb29 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.497317] env[62204]: INFO nova.compute.manager [-] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Took 1.24 seconds to deallocate network for instance. [ 1036.503050] env[62204]: DEBUG oslo_vmware.api [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 1036.503050] env[62204]: value = "task-1200374" [ 1036.503050] env[62204]: _type = "Task" [ 1036.503050] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.510666] env[62204]: DEBUG oslo_vmware.api [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200374, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.517681] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c3d6b28-2b4e-4d05-9e8b-29e4f5c08514 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.526746] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf1c270-db40-492f-9d33-3976faa19182 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.557228] env[62204]: DEBUG nova.compute.manager [req-87f8ac87-2f2d-4751-b2ba-fb37dcccf6f1 req-72871989-d83f-4447-b01a-603329118cfe service nova] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Detach interface failed, port_id=aa247f33-6bdf-4e2e-b1aa-fbd26891eff4, reason: Instance ade509d8-5d7c-4926-bb2f-067dce84f76c could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1036.610689] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200370, 'name': CloneVM_Task} progress is 95%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.685517] env[62204]: DEBUG oslo_concurrency.lockutils [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.730s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.688064] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.142s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.688291] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.717593] env[62204]: INFO nova.scheduler.client.report [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted allocations for instance 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc [ 1036.720563] env[62204]: INFO nova.scheduler.client.report [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Deleted allocations for instance d3be85d1-34b6-4b00-9740-c3abdb4b0734 [ 1036.759895] env[62204]: DEBUG nova.network.neutron [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Port a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee binding to destination host cpu-1 is already ACTIVE {{(pid=62204) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1036.817835] env[62204]: DEBUG nova.network.neutron [req-1cf6d5ca-985e-4eb6-bff2-ed16984e9870 req-70067792-deab-4a86-8eed-dc34f870b561 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Updated VIF entry in instance network info cache for port 043fbcf5-77f4-4656-91a6-01f89dd81e08. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1036.818238] env[62204]: DEBUG nova.network.neutron [req-1cf6d5ca-985e-4eb6-bff2-ed16984e9870 req-70067792-deab-4a86-8eed-dc34f870b561 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Updating instance_info_cache with network_info: [{"id": "043fbcf5-77f4-4656-91a6-01f89dd81e08", "address": "fa:16:3e:86:0b:29", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.249", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap043fbcf5-77", "ovs_interfaceid": "043fbcf5-77f4-4656-91a6-01f89dd81e08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.880292] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200373, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118669} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.880579] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.881344] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70de85e8-0588-454d-8809-523334dce70c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.903124] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 8081d981-42c4-46e4-82e7-2f8b59a68465/8081d981-42c4-46e4-82e7-2f8b59a68465.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.903403] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31875170-2588-4a3a-bdd0-35f3bbdbacab {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.922019] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1036.922019] env[62204]: value = "task-1200375" [ 1036.922019] env[62204]: _type = "Task" [ 1036.922019] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.929954] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200375, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.005124] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.005417] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.006021] env[62204]: DEBUG nova.objects.instance [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lazy-loading 'resources' on Instance uuid ade509d8-5d7c-4926-bb2f-067dce84f76c {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.015559] env[62204]: DEBUG oslo_vmware.api [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200374, 'name': PowerOffVM_Task, 'duration_secs': 0.170793} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.016387] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.017026] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1037.017026] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d4bfd03-bfdd-4776-a6bb-0bc770083b78 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.078971] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.079271] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.079464] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-594ac5c2-f980-42bf-8672-44e10ca9492a 
tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Deleting the datastore file [datastore2] f5f0c15f-ae0d-4615-93ab-3203a5d7e090 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.079753] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2193dbe3-e6d3-41cc-8036-4cec8a07ff56 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.086981] env[62204]: DEBUG oslo_vmware.api [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for the task: (returnval){ [ 1037.086981] env[62204]: value = "task-1200377" [ 1037.086981] env[62204]: _type = "Task" [ 1037.086981] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.095546] env[62204]: DEBUG oslo_vmware.api [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200377, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.107496] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200370, 'name': CloneVM_Task, 'duration_secs': 1.155444} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.107787] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Created linked-clone VM from snapshot [ 1037.108505] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b881a991-d311-4adf-b7a2-f96dcf0feba4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.115969] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Uploading image 7f840d5f-1f8a-4e6d-95b3-265fb5a5d126 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1037.141645] env[62204]: DEBUG oslo_vmware.rw_handles [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1037.141645] env[62204]: value = "vm-260137" [ 1037.141645] env[62204]: _type = "VirtualMachine" [ 1037.141645] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1037.141989] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9c0312fc-e219-4a58-95f5-dc92ffc4ed68 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.148714] env[62204]: DEBUG oslo_vmware.rw_handles [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lease: (returnval){ [ 1037.148714] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d1a1aa-6cd9-562a-5575-526307fcc766" [ 1037.148714] env[62204]: _type = "HttpNfcLease" [ 1037.148714] env[62204]: } obtained for exporting VM: (result){ [ 1037.148714] env[62204]: value = "vm-260137" [ 1037.148714] env[62204]: _type = "VirtualMachine" [ 1037.148714] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1037.148988] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the lease: (returnval){ [ 1037.148988] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d1a1aa-6cd9-562a-5575-526307fcc766" [ 1037.148988] env[62204]: _type = "HttpNfcLease" [ 1037.148988] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1037.155070] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1037.155070] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d1a1aa-6cd9-562a-5575-526307fcc766" [ 1037.155070] env[62204]: _type = "HttpNfcLease" [ 1037.155070] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1037.229685] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9ffebb15-929c-450a-9c7f-ad541be4f1dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "57e14d47-1d3f-4fed-93c1-11cfc17dc9bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.792s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.231019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-68a7d753-de93-4aed-862a-32d93f214ce1 tempest-ServerDiskConfigTestJSON-775740427 tempest-ServerDiskConfigTestJSON-775740427-project-member] Lock "d3be85d1-34b6-4b00-9740-c3abdb4b0734" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.688s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.320862] env[62204]: DEBUG oslo_concurrency.lockutils [req-1cf6d5ca-985e-4eb6-bff2-ed16984e9870 req-70067792-deab-4a86-8eed-dc34f870b561 service nova] Releasing lock "refresh_cache-c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1037.432397] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200375, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.600556] env[62204]: DEBUG oslo_vmware.api [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200377, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.629062] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd99bf5-b7d9-499d-939f-f59ec9a5962b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.636180] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74d5bb9-992c-4e42-a488-da5234aba7cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.668569] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5a0c29-29bb-4eb8-a8b0-1a52bc5c15d3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.674254] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1037.674254] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d1a1aa-6cd9-562a-5575-526307fcc766" [ 1037.674254] env[62204]: _type = "HttpNfcLease" [ 1037.674254] env[62204]: } is ready. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1037.676643] env[62204]: DEBUG oslo_vmware.rw_handles [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1037.676643] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52d1a1aa-6cd9-562a-5575-526307fcc766" [ 1037.676643] env[62204]: _type = "HttpNfcLease" [ 1037.676643] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1037.677478] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa04b2a-6c9a-44e9-88d1-395bca7f8fd7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.680783] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c19e149-fa94-4b7f-9d8f-c39a8db83b1c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.689335] env[62204]: DEBUG oslo_vmware.rw_handles [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5266f003-dfd4-7a9b-d5dc-bdc2b37baf56/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1037.689546] env[62204]: DEBUG oslo_vmware.rw_handles [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5266f003-dfd4-7a9b-d5dc-bdc2b37baf56/disk-0.vmdk for reading. 
{{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1037.698987] env[62204]: DEBUG nova.compute.provider_tree [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1037.776043] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1037.776244] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1037.776424] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1037.813414] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7391fcb3-60be-4c06-af9d-1821b596ead8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.933305] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200375, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.110301] env[62204]: DEBUG oslo_vmware.api [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Task: {'id': task-1200377, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.66621} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.110301] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1038.110301] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1038.110301] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1038.110301] env[62204]: INFO nova.compute.manager [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1038.110301] env[62204]: DEBUG oslo.service.loopingcall [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1038.110301] env[62204]: DEBUG nova.compute.manager [-] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1038.110301] env[62204]: DEBUG nova.network.neutron [-] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1038.193406] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1038.193731] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1038.201896] env[62204]: DEBUG nova.scheduler.client.report [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1038.433990] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200375, 'name': ReconfigVM_Task, 'duration_secs': 1.213122} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.434486] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 8081d981-42c4-46e4-82e7-2f8b59a68465/8081d981-42c4-46e4-82e7-2f8b59a68465.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.436503] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72d54f00-b24c-4368-842d-6052402104c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.448016] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1038.448016] env[62204]: value = "task-1200379" [ 1038.448016] env[62204]: _type = "Task" [ 1038.448016] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.453539] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200379, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.551734] env[62204]: DEBUG nova.compute.manager [req-74f4500c-25b3-4b18-9aed-d24edfa132c0 req-3bfa6e87-6b87-47f0-97ee-da205f51f71c service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Received event network-vif-deleted-5af1ae4e-3a58-4d76-854a-59ac01168a4c {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.552680] env[62204]: INFO nova.compute.manager [req-74f4500c-25b3-4b18-9aed-d24edfa132c0 req-3bfa6e87-6b87-47f0-97ee-da205f51f71c service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Neutron deleted interface 5af1ae4e-3a58-4d76-854a-59ac01168a4c; detaching it from the instance and deleting it from the info cache [ 1038.553098] env[62204]: DEBUG nova.network.neutron [req-74f4500c-25b3-4b18-9aed-d24edfa132c0 req-3bfa6e87-6b87-47f0-97ee-da205f51f71c service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.697129] env[62204]: DEBUG nova.compute.manager [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1038.708982] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.703s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1038.742579] env[62204]: INFO nova.scheduler.client.report [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted allocations for instance ade509d8-5d7c-4926-bb2f-067dce84f76c [ 1038.830838] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.831164] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.831405] env[62204]: DEBUG nova.network.neutron [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1038.954476] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200379, 'name': Rename_Task, 'duration_secs': 0.253233} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.954860] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.955218] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10517122-dcac-449e-bc6e-d97ea910d922 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.961071] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1038.961071] env[62204]: value = "task-1200380" [ 1038.961071] env[62204]: _type = "Task" [ 1038.961071] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.968656] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200380, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.030939] env[62204]: DEBUG nova.network.neutron [-] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.057963] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec9bb2c1-3906-42db-8413-ad05b5521e58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.068575] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceec265b-dc52-4357-9933-ea939230b308 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.099014] env[62204]: DEBUG nova.compute.manager [req-74f4500c-25b3-4b18-9aed-d24edfa132c0 req-3bfa6e87-6b87-47f0-97ee-da205f51f71c service nova] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Detach interface failed, port_id=5af1ae4e-3a58-4d76-854a-59ac01168a4c, reason: Instance f5f0c15f-ae0d-4615-93ab-3203a5d7e090 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1039.221815] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.222064] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.223875] env[62204]: INFO nova.compute.claims [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.259021] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a3feb7f1-439b-473f-9626-3a02bc0c3e9a tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "ade509d8-5d7c-4926-bb2f-067dce84f76c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.118s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1039.482405] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200380, 'name': PowerOnVM_Task} progress 
is 89%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.535863] env[62204]: INFO nova.compute.manager [-] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Took 1.43 seconds to deallocate network for instance. [ 1039.622079] env[62204]: DEBUG nova.network.neutron [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance_info_cache with network_info: [{"id": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "address": "fa:16:3e:8d:31:3c", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7dcd5c1-45", "ovs_interfaceid": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.977916] env[62204]: DEBUG oslo_vmware.api [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200380, 'name': PowerOnVM_Task, 'duration_secs': 0.634373} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.978209] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.978809] env[62204]: INFO nova.compute.manager [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Took 8.24 seconds to spawn the instance on the hypervisor. 
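(Editor's note) The PowerOnVM_Task entries above — "Waiting for the task … to complete", repeated "progress is N%" DEBUG lines, then "completed successfully" with a recorded duration_secs — follow oslo.vmware's poll-until-terminal-state pattern. Below is a minimal, self-contained sketch of that loop purely for illustration; get_task_info() and its simulated states are hypothetical stand-ins, not the real vSphere/oslo.vmware API.

```python
import itertools
import time

# Simulated task states, for illustration only; the real loop queries vCenter
# through the oslo.vmware session instead of consuming this iterator.
_SIMULATED_STATES = itertools.chain(
    [{"state": "running", "progress": 0},
     {"state": "running", "progress": 89}],
    itertools.repeat({"state": "success", "progress": 100}),
)


def get_task_info(task_ref):
    # Hypothetical helper: return the next simulated task state.
    return next(_SIMULATED_STATES)


def wait_for_task(task_ref, poll_interval=0.1):
    """Poll a task until it reaches a terminal state, mirroring the
    'progress is N%.' / 'completed successfully' lines in the log."""
    start = time.monotonic()
    while True:
        info = get_task_info(task_ref)
        if info["state"] == "success":
            print(f"Task {task_ref} completed successfully "
                  f"(duration_secs={time.monotonic() - start:.6f})")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_ref} failed")
        print(f"Task {task_ref} progress is {info['progress']}%.")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task("task-1200380")
```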
[ 1039.978809] env[62204]: DEBUG nova.compute.manager [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1039.979622] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd74b0d-71ce-4e3a-8d33-24bc09b0ab74 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.043391] env[62204]: DEBUG oslo_concurrency.lockutils [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.128090] env[62204]: DEBUG oslo_concurrency.lockutils [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1040.358076] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21308e07-5a9f-4a24-8c12-92435bf90802 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.365981] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c926da13-2fcd-4e74-9b57-baab9d5c27ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.398241] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c394c5-d6bf-48cd-9f76-aa844306cfe6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.405507] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cece47d1-8902-4bc4-b771-05b416051bf3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.420228] env[62204]: DEBUG nova.compute.provider_tree [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.499051] env[62204]: INFO nova.compute.manager [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Took 18.84 seconds to build instance. 
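(Editor's note) The recurring Acquiring lock / acquired … waited / released … held lines in this section come from oslo.concurrency's lockutils wrappers around critical sections such as the resource tracker's "compute_resources" lock. A minimal sketch of how such a guarded section is typically written is shown below; the function bodies are placeholders, not Nova's actual resource-tracker code.

```python
from oslo_concurrency import lockutils

# Semaphore name matches what the log shows; the work done inside is a placeholder.
COMPUTE_RESOURCES_SEMAPHORE = "compute_resources"


@lockutils.synchronized(COMPUTE_RESOURCES_SEMAPHORE)
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Runs with the "compute_resources" lock held; oslo.concurrency's wrapper
    # emits the "acquired ... waited Ns" / "released ... held Ns" DEBUG lines.
    print(f"claiming {vcpus} VCPU / {memory_mb} MB for {instance_uuid}")


def update_usage(instance_uuid):
    # Equivalent context-manager form of the same lock.
    with lockutils.lock(COMPUTE_RESOURCES_SEMAPHORE):
        print(f"updating usage for {instance_uuid}")
```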
[ 1040.569618] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1040.569963] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.636441] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aaaab45-1e88-4bf2-b07e-6cc54cf64ffe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.644754] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ab8b00-dea7-4e9f-a438-baa26e2c2e14 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.923506] env[62204]: DEBUG nova.scheduler.client.report [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1041.001780] env[62204]: DEBUG oslo_concurrency.lockutils [None req-bd7dbe18-7b5d-4170-b7c4-a8544bf0160c tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "8081d981-42c4-46e4-82e7-2f8b59a68465" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.353s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.007071] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquiring lock "5a3e46f9-ccf9-444e-89e1-6ca46c63d25c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.007071] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock "5a3e46f9-ccf9-444e-89e1-6ca46c63d25c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.072366] env[62204]: DEBUG nova.compute.manager [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1041.220411] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd407b9b-149e-4c37-91d5-4645def409fa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.227491] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-52f90f16-6b46-4365-8256-a5332b992411 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Suspending the VM {{(pid=62204) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1041.229682] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-a95e7e8f-2320-4408-b675-2ecd7cef954f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.236149] env[62204]: DEBUG oslo_vmware.api [None req-52f90f16-6b46-4365-8256-a5332b992411 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1041.236149] env[62204]: value = "task-1200382" [ 1041.236149] env[62204]: _type = "Task" [ 1041.236149] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.244368] env[62204]: DEBUG oslo_vmware.api [None req-52f90f16-6b46-4365-8256-a5332b992411 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200382, 'name': SuspendVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.428837] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.429623] env[62204]: DEBUG nova.compute.manager [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1041.432749] env[62204]: DEBUG oslo_concurrency.lockutils [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.390s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1041.433074] env[62204]: DEBUG nova.objects.instance [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lazy-loading 'resources' on Instance uuid f5f0c15f-ae0d-4615-93ab-3203a5d7e090 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.509055] env[62204]: DEBUG nova.compute.manager [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1041.597868] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1041.748035] env[62204]: DEBUG oslo_vmware.api [None req-52f90f16-6b46-4365-8256-a5332b992411 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200382, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.750260] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb87d41e-6151-42ef-99fe-312a8fe88890 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.773043] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71df45f6-a4a0-4cc1-88f3-1371d4d77a26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.781025] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance 'a93880fc-e517-4d83-98c1-9ce2405bf9d5' progress to 83 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1041.937890] env[62204]: DEBUG nova.compute.utils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1041.941942] env[62204]: DEBUG nova.compute.manager [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1041.941942] env[62204]: DEBUG nova.network.neutron [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1042.022474] env[62204]: DEBUG nova.policy [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6db3ba1bb9b9464d870969f1f7d95a9d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ef8dc436e4b45d0a8d50468666358e3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1042.035402] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.114611] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b4891a-d516-4c82-b5a8-5f49e90cd504 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.123188] 
env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e475d475-e80b-46fe-b823-833bc3024e7e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.155021] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9278ef-f830-4f47-9e19-ff2565422a9e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.162870] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f34d8c-8be8-4070-87b7-18ce70665cf2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.178917] env[62204]: DEBUG nova.compute.provider_tree [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.246425] env[62204]: DEBUG oslo_vmware.api [None req-52f90f16-6b46-4365-8256-a5332b992411 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200382, 'name': SuspendVM_Task, 'duration_secs': 0.875846} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.246689] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-52f90f16-6b46-4365-8256-a5332b992411 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Suspended the VM {{(pid=62204) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1042.246874] env[62204]: DEBUG nova.compute.manager [None req-52f90f16-6b46-4365-8256-a5332b992411 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1042.247685] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1c2ebd-9018-498d-b2fc-d735e00cde5d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.289521] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.290505] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-912240a8-82e3-4596-b71f-c8d33805a600 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.298026] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1042.298026] env[62204]: value = "task-1200383" [ 1042.298026] env[62204]: _type = "Task" [ 1042.298026] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.305954] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.442847] env[62204]: DEBUG nova.compute.manager [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1042.550312] env[62204]: DEBUG nova.network.neutron [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Successfully created port: 9d3af8f6-e075-441b-b191-6617ea2a18a4 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1042.681108] env[62204]: DEBUG nova.scheduler.client.report [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.812074] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200383, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.190039] env[62204]: DEBUG oslo_concurrency.lockutils [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.755s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.191352] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.593s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.198671] env[62204]: INFO nova.compute.claims [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1043.214293] env[62204]: INFO nova.scheduler.client.report [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Deleted allocations for instance f5f0c15f-ae0d-4615-93ab-3203a5d7e090 [ 1043.318167] env[62204]: DEBUG oslo_vmware.api [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200383, 'name': PowerOnVM_Task, 'duration_secs': 0.555045} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.318493] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1043.318695] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-e96a5100-6731-467b-9d16-8c74c13fdfd3 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance 'a93880fc-e517-4d83-98c1-9ce2405bf9d5' progress to 100 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1043.455550] env[62204]: DEBUG nova.compute.manager [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1043.484929] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1043.485275] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1043.485491] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1043.485677] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1043.485957] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1043.486032] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1043.487556] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1043.487886] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1043.488153] env[62204]: DEBUG nova.virt.hardware [None 
req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1043.488393] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1043.488595] env[62204]: DEBUG nova.virt.hardware [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1043.489945] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652ac50a-4b0f-491b-9ab3-70d3e2fa4776 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.499413] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a916a2e3-3769-40c5-9cd0-a3ae313b82e5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.727383] env[62204]: DEBUG oslo_concurrency.lockutils [None req-594ac5c2-f980-42bf-8672-44e10ca9492a tempest-ServerRescueNegativeTestJSON-279322282 tempest-ServerRescueNegativeTestJSON-279322282-project-member] Lock "f5f0c15f-ae0d-4615-93ab-3203a5d7e090" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.245s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.920719] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "8081d981-42c4-46e4-82e7-2f8b59a68465" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.921061] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "8081d981-42c4-46e4-82e7-2f8b59a68465" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.922616] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "8081d981-42c4-46e4-82e7-2f8b59a68465-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.922616] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock 
"8081d981-42c4-46e4-82e7-2f8b59a68465-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.922616] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "8081d981-42c4-46e4-82e7-2f8b59a68465-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.930164] env[62204]: INFO nova.compute.manager [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Terminating instance [ 1043.932465] env[62204]: DEBUG nova.compute.manager [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1043.932677] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1043.933563] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561b3027-9fd9-4088-8bd2-826139b29909 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.942283] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1043.942381] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83a79d75-13ab-4e79-9ba0-2b2403235df6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.011237] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.011495] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.011688] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] 
Deleting the datastore file [datastore1] 8081d981-42c4-46e4-82e7-2f8b59a68465 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.012009] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1d99c59-bffb-4ab0-9f3a-40f2188f657c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.018326] env[62204]: DEBUG oslo_vmware.api [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1044.018326] env[62204]: value = "task-1200385" [ 1044.018326] env[62204]: _type = "Task" [ 1044.018326] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.028662] env[62204]: DEBUG oslo_vmware.api [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200385, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.160335] env[62204]: DEBUG nova.compute.manager [req-44310e15-631a-4cb8-8eb1-68f2e85885c2 req-30b6f0d3-daca-40a8-8f0c-a09b310d9a76 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Received event network-vif-plugged-9d3af8f6-e075-441b-b191-6617ea2a18a4 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1044.160804] env[62204]: DEBUG oslo_concurrency.lockutils [req-44310e15-631a-4cb8-8eb1-68f2e85885c2 req-30b6f0d3-daca-40a8-8f0c-a09b310d9a76 service nova] Acquiring lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.161269] env[62204]: DEBUG oslo_concurrency.lockutils [req-44310e15-631a-4cb8-8eb1-68f2e85885c2 req-30b6f0d3-daca-40a8-8f0c-a09b310d9a76 service nova] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.161642] env[62204]: DEBUG oslo_concurrency.lockutils [req-44310e15-631a-4cb8-8eb1-68f2e85885c2 req-30b6f0d3-daca-40a8-8f0c-a09b310d9a76 service nova] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.162025] env[62204]: DEBUG nova.compute.manager [req-44310e15-631a-4cb8-8eb1-68f2e85885c2 req-30b6f0d3-daca-40a8-8f0c-a09b310d9a76 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] No waiting events found dispatching network-vif-plugged-9d3af8f6-e075-441b-b191-6617ea2a18a4 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1044.162427] env[62204]: WARNING nova.compute.manager [req-44310e15-631a-4cb8-8eb1-68f2e85885c2 req-30b6f0d3-daca-40a8-8f0c-a09b310d9a76 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Received unexpected event network-vif-plugged-9d3af8f6-e075-441b-b191-6617ea2a18a4 for instance with vm_state building and task_state 
spawning. [ 1044.427807] env[62204]: DEBUG nova.network.neutron [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Successfully updated port: 9d3af8f6-e075-441b-b191-6617ea2a18a4 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.457696] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b126d068-a64b-4e34-b951-ffc789d0fa3b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.465721] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a81ad86-9283-4f37-94f2-de22076abc40 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.496615] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce108aee-9287-4803-96eb-d3203596ab8d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.504169] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675a9963-1e1f-4d55-bf9a-c400c1f085cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.518028] env[62204]: DEBUG nova.compute.provider_tree [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.526694] env[62204]: DEBUG oslo_vmware.api [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187964} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.527579] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.527759] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1044.527966] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1044.528193] env[62204]: INFO nova.compute.manager [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1044.528521] env[62204]: DEBUG oslo.service.loopingcall [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.528740] env[62204]: DEBUG nova.compute.manager [-] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1044.528740] env[62204]: DEBUG nova.network.neutron [-] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1044.931137] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1044.931336] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.931537] env[62204]: DEBUG nova.network.neutron [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1045.023571] env[62204]: DEBUG nova.scheduler.client.report [None req-4f34c39e-0904-4839-9651-5e28efb7c489 
tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1045.288916] env[62204]: DEBUG nova.network.neutron [-] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.315815] env[62204]: DEBUG nova.network.neutron [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Port a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee binding to destination host cpu-1 is already ACTIVE {{(pid=62204) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1045.316332] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.316332] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.316524] env[62204]: DEBUG nova.network.neutron [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1045.472073] env[62204]: DEBUG nova.network.neutron [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1045.530335] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.530335] env[62204]: DEBUG nova.compute.manager [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1045.532935] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.498s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.535436] env[62204]: INFO nova.compute.claims [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.628172] env[62204]: DEBUG oslo_vmware.rw_handles [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5266f003-dfd4-7a9b-d5dc-bdc2b37baf56/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1045.630920] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdd9481-655d-4104-b3a1-87e827407f5f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.639020] env[62204]: DEBUG oslo_vmware.rw_handles [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5266f003-dfd4-7a9b-d5dc-bdc2b37baf56/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1045.639020] env[62204]: ERROR oslo_vmware.rw_handles [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5266f003-dfd4-7a9b-d5dc-bdc2b37baf56/disk-0.vmdk due to incomplete transfer. 
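(Editor's note) The rw_handles entries just above record a VMDK read handle being closed while its NFC lease was still in the "ready" state, so the lease is aborted (HttpNfcLeaseAbort) because the transfer was incomplete. A rough, self-contained sketch of that close-time decision follows; the class and its abort/complete behaviour are illustrative assumptions, not oslo.vmware's real rw_handles implementation.

```python
class VmdkReadHandle:
    """Toy model of a VMDK read handle backed by an NFC lease."""

    def __init__(self, lease_url, total_bytes):
        self._lease_url = lease_url
        self._total_bytes = total_bytes
        self._bytes_read = 0

    def read(self, chunk_size):
        # Return up to chunk_size bytes of dummy data and track progress.
        chunk = min(chunk_size, self._total_bytes - self._bytes_read)
        self._bytes_read += chunk
        return b"\0" * chunk

    def close(self):
        # Mirrors the log: a lease left with an incomplete transfer is
        # aborted; a fully consumed one would be completed instead.
        if self._bytes_read < self._total_bytes:
            print(f"Aborting lease for {self._lease_url} due to incomplete transfer.")
        else:
            print(f"Completing lease for {self._lease_url}.")
        print(f"Closed VMDK read handle for {self._lease_url}.")


if __name__ == "__main__":
    handle = VmdkReadHandle("https://esx.example/nfc/disk-0.vmdk", total_bytes=1024)
    handle.read(512)   # stop partway through
    handle.close()     # -> abort path, as in the log above
```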
[ 1045.639020] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b0a89057-d04e-430f-89e3-5e0b4236ebc5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.642102] env[62204]: DEBUG nova.network.neutron [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [{"id": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "address": "fa:16:3e:26:e0:27", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3af8f6-e0", "ovs_interfaceid": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.644864] env[62204]: DEBUG oslo_vmware.rw_handles [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5266f003-dfd4-7a9b-d5dc-bdc2b37baf56/disk-0.vmdk. 
{{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1045.645254] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Uploaded image 7f840d5f-1f8a-4e6d-95b3-265fb5a5d126 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1045.648456] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1045.648932] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e440a8d7-5bbb-413e-ad7d-40eef817fc84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.659564] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1045.659564] env[62204]: value = "task-1200386" [ 1045.659564] env[62204]: _type = "Task" [ 1045.659564] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.666153] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200386, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.791883] env[62204]: INFO nova.compute.manager [-] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Took 1.26 seconds to deallocate network for instance. [ 1046.039919] env[62204]: DEBUG nova.compute.utils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1046.041626] env[62204]: DEBUG nova.compute.manager [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1046.041626] env[62204]: DEBUG nova.network.neutron [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1046.054196] env[62204]: DEBUG nova.network.neutron [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance_info_cache with network_info: [{"id": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "address": "fa:16:3e:8d:31:3c", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7dcd5c1-45", "ovs_interfaceid": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.110188] env[62204]: DEBUG nova.policy [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f57a0e000a79440489a0009f1b2390e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cc2d3674b2a4fa3806dc0286481368e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1046.150075] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.150374] env[62204]: DEBUG nova.compute.manager [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Instance network_info: |[{"id": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "address": "fa:16:3e:26:e0:27", "network": 
{"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3af8f6-e0", "ovs_interfaceid": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1046.151014] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:e0:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d3af8f6-e075-441b-b191-6617ea2a18a4', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1046.163880] env[62204]: DEBUG oslo.service.loopingcall [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1046.168460] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1046.171930] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0082ba74-1e4f-4012-9083-9a529c4d5232 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.193286] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200386, 'name': Destroy_Task, 'duration_secs': 0.335212} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.194514] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Destroyed the VM [ 1046.194779] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1046.195034] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1046.195034] env[62204]: value = "task-1200387" [ 1046.195034] env[62204]: _type = "Task" [ 1046.195034] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.195231] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b62bc4ec-8e5b-48e4-a93a-9b977f3a1121 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.206378] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200387, 'name': CreateVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.208278] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1046.208278] env[62204]: value = "task-1200388" [ 1046.208278] env[62204]: _type = "Task" [ 1046.208278] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.220523] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200388, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.287739] env[62204]: DEBUG nova.compute.manager [req-7f155073-a9be-4fe2-bd8a-1a5e20b33a31 req-d528d228-81aa-43a6-962f-c14ad56e41e1 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Received event network-changed-9d3af8f6-e075-441b-b191-6617ea2a18a4 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1046.287827] env[62204]: DEBUG nova.compute.manager [req-7f155073-a9be-4fe2-bd8a-1a5e20b33a31 req-d528d228-81aa-43a6-962f-c14ad56e41e1 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Refreshing instance network info cache due to event network-changed-9d3af8f6-e075-441b-b191-6617ea2a18a4. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1046.288262] env[62204]: DEBUG oslo_concurrency.lockutils [req-7f155073-a9be-4fe2-bd8a-1a5e20b33a31 req-d528d228-81aa-43a6-962f-c14ad56e41e1 service nova] Acquiring lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.288262] env[62204]: DEBUG oslo_concurrency.lockutils [req-7f155073-a9be-4fe2-bd8a-1a5e20b33a31 req-d528d228-81aa-43a6-962f-c14ad56e41e1 service nova] Acquired lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.288385] env[62204]: DEBUG nova.network.neutron [req-7f155073-a9be-4fe2-bd8a-1a5e20b33a31 req-d528d228-81aa-43a6-962f-c14ad56e41e1 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Refreshing network info cache for port 9d3af8f6-e075-441b-b191-6617ea2a18a4 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1046.301690] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.547348] env[62204]: DEBUG nova.compute.manager [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1046.557788] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.607395] env[62204]: DEBUG nova.network.neutron [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Successfully created port: 61c7f8ce-1fd0-468f-be5b-df93ccf11da1 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1046.689732] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91365780-4564-4b51-944c-18d489773357 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.702470] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422cccf1-5391-429e-831b-b867e97beb66 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.713208] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200387, 'name': CreateVM_Task, 'duration_secs': 0.339519} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.737399] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1046.738643] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.738809] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.739143] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1046.739847] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf6e17f-3f6f-4980-bcf4-7c5017533697 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.743058] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edb8873e-ddd6-4b17-b6e8-80d05a05317f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.748760] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200388, 'name': RemoveSnapshot_Task, 'duration_secs': 0.354706} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.749405] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1046.749764] env[62204]: DEBUG nova.compute.manager [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1046.752765] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde10285-936b-482c-9daa-0b5c93e2f648 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.757772] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045e8cf4-7d1a-4b9c-9c43-d5c5bf227ac7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.761837] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1046.761837] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52947cd0-6d0a-ed28-0033-3d284c8b9ea6" [ 1046.761837] env[62204]: _type = "Task" [ 1046.761837] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.775638] env[62204]: DEBUG nova.compute.provider_tree [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.783877] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52947cd0-6d0a-ed28-0033-3d284c8b9ea6, 'name': SearchDatastore_Task, 'duration_secs': 0.009885} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.788036] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.788036] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.788036] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.788036] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.788036] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1046.788036] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-789dca7a-8765-48d2-b155-0197ebb99982 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.794105] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1046.794287] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1046.794969] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0f2f941-1a21-4fd0-afac-f077be4330f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.800062] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1046.800062] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525dd31b-dfbf-a0b1-eb3b-96fb61b9692b" [ 1046.800062] env[62204]: _type = "Task" [ 1046.800062] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.807785] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525dd31b-dfbf-a0b1-eb3b-96fb61b9692b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.994242] env[62204]: DEBUG nova.network.neutron [req-7f155073-a9be-4fe2-bd8a-1a5e20b33a31 req-d528d228-81aa-43a6-962f-c14ad56e41e1 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updated VIF entry in instance network info cache for port 9d3af8f6-e075-441b-b191-6617ea2a18a4. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1046.994645] env[62204]: DEBUG nova.network.neutron [req-7f155073-a9be-4fe2-bd8a-1a5e20b33a31 req-d528d228-81aa-43a6-962f-c14ad56e41e1 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [{"id": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "address": "fa:16:3e:26:e0:27", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3af8f6-e0", "ovs_interfaceid": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.063272] env[62204]: DEBUG nova.compute.manager [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Hypervisor driver does not support instance shared storage 
check, assuming it's not on shared storage {{(pid=62204) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1047.280073] env[62204]: DEBUG nova.scheduler.client.report [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1047.286221] env[62204]: INFO nova.compute.manager [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Shelve offloading [ 1047.288494] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.288494] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c784348-e884-40b9-9d89-7ecfc86616ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.296553] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1047.296553] env[62204]: value = "task-1200389" [ 1047.296553] env[62204]: _type = "Task" [ 1047.296553] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.307391] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1047.307625] env[62204]: DEBUG nova.compute.manager [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1047.308378] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c241db-028b-44bc-83d9-11757c7d947b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.313889] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525dd31b-dfbf-a0b1-eb3b-96fb61b9692b, 'name': SearchDatastore_Task, 'duration_secs': 0.007739} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.315202] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edd71a9c-f23b-41c2-be4e-9541e276aa34 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.318955] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1047.318955] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.319116] env[62204]: DEBUG nova.network.neutron [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1047.323270] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1047.323270] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525eb175-6fab-cb1d-592a-3a8a9d8d1598" [ 1047.323270] env[62204]: _type = "Task" [ 1047.323270] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.330470] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525eb175-6fab-cb1d-592a-3a8a9d8d1598, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.497718] env[62204]: DEBUG oslo_concurrency.lockutils [req-7f155073-a9be-4fe2-bd8a-1a5e20b33a31 req-d528d228-81aa-43a6-962f-c14ad56e41e1 service nova] Releasing lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.498030] env[62204]: DEBUG nova.compute.manager [req-7f155073-a9be-4fe2-bd8a-1a5e20b33a31 req-d528d228-81aa-43a6-962f-c14ad56e41e1 service nova] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Received event network-vif-deleted-765c298f-6dcb-4b6e-8b1b-f62c31fd21de {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1047.554475] env[62204]: DEBUG nova.compute.manager [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1047.579706] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1047.579949] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1047.580130] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.580322] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1047.580472] 
env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.580622] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1047.580828] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1047.580989] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1047.581184] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1047.581346] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1047.581520] env[62204]: DEBUG nova.virt.hardware [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1047.582365] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80ec122-4529-4446-b2ca-f68995c56acf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.590859] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedae1e8-fbc0-48dc-9535-2d75d0c36b47 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.786830] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.787429] env[62204]: DEBUG nova.compute.manager [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] 
Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1047.790090] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.489s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1047.790320] env[62204]: DEBUG nova.objects.instance [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lazy-loading 'resources' on Instance uuid 8081d981-42c4-46e4-82e7-2f8b59a68465 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.833262] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]525eb175-6fab-cb1d-592a-3a8a9d8d1598, 'name': SearchDatastore_Task, 'duration_secs': 0.009008} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.833538] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.833841] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe/16b2b4d0-f6df-4025-b1f5-72e05c1d9abe.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1047.834068] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b042f481-4af3-4b77-9b90-d8b214da87bb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.841574] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1047.841574] env[62204]: value = "task-1200390" [ 1047.841574] env[62204]: _type = "Task" [ 1047.841574] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.849434] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200390, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.086489] env[62204]: DEBUG nova.network.neutron [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating instance_info_cache with network_info: [{"id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "address": "fa:16:3e:52:85:36", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c7be21-51", "ovs_interfaceid": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.171130] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.205978] env[62204]: DEBUG nova.compute.manager [req-87d8768d-852b-4079-8ccf-b60cc8214af2 req-22a58982-5766-400c-a7e6-5f113c298e30 service nova] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Received event network-vif-plugged-61c7f8ce-1fd0-468f-be5b-df93ccf11da1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1048.206334] env[62204]: DEBUG oslo_concurrency.lockutils [req-87d8768d-852b-4079-8ccf-b60cc8214af2 req-22a58982-5766-400c-a7e6-5f113c298e30 service nova] Acquiring lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1048.206563] env[62204]: DEBUG oslo_concurrency.lockutils [req-87d8768d-852b-4079-8ccf-b60cc8214af2 req-22a58982-5766-400c-a7e6-5f113c298e30 service nova] Lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1048.206821] env[62204]: DEBUG oslo_concurrency.lockutils [req-87d8768d-852b-4079-8ccf-b60cc8214af2 req-22a58982-5766-400c-a7e6-5f113c298e30 service nova] Lock 
"2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.207347] env[62204]: DEBUG nova.compute.manager [req-87d8768d-852b-4079-8ccf-b60cc8214af2 req-22a58982-5766-400c-a7e6-5f113c298e30 service nova] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] No waiting events found dispatching network-vif-plugged-61c7f8ce-1fd0-468f-be5b-df93ccf11da1 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1048.207678] env[62204]: WARNING nova.compute.manager [req-87d8768d-852b-4079-8ccf-b60cc8214af2 req-22a58982-5766-400c-a7e6-5f113c298e30 service nova] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Received unexpected event network-vif-plugged-61c7f8ce-1fd0-468f-be5b-df93ccf11da1 for instance with vm_state building and task_state spawning. [ 1048.293307] env[62204]: DEBUG nova.compute.utils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1048.300548] env[62204]: DEBUG nova.compute.manager [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Not allocating networking since 'none' was specified. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1048.354244] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200390, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488855} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.354634] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe/16b2b4d0-f6df-4025-b1f5-72e05c1d9abe.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1048.354900] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1048.355459] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af97e725-1310-4c83-aecf-c8f064eefdf7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.361769] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1048.361769] env[62204]: value = "task-1200391" [ 1048.361769] env[62204]: _type = "Task" [ 1048.361769] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.370515] env[62204]: DEBUG nova.network.neutron [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Successfully updated port: 61c7f8ce-1fd0-468f-be5b-df93ccf11da1 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1048.375187] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200391, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.457817] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8532be6-8e08-49e5-be91-2a5eb2b47e57 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.466380] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db8d04c-7965-4135-bbc5-ecd3b1f84fde {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.498750] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f703a26a-7764-40cf-b370-20f6cd029f2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.506139] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad7bfa7-da2c-4e50-9bf1-c1c22db08834 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.519963] env[62204]: DEBUG nova.compute.provider_tree [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.589942] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1048.800673] env[62204]: DEBUG nova.compute.manager [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1048.871278] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200391, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.207706} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.871556] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1048.872407] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584b535f-8c9b-48cc-bb1d-c9bb1f83edbb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.877243] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "refresh_cache-2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.877287] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "refresh_cache-2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.877406] env[62204]: DEBUG nova.network.neutron [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1048.896089] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe/16b2b4d0-f6df-4025-b1f5-72e05c1d9abe.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.897322] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e026b05-9b69-42c2-bc67-dbf3d191b5d1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.913452] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.914442] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8bc676c-5b2c-4952-9060-1cf083cc9e37 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.923678] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Unregistering the VM {{(pid=62204) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1048.926217] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50d1a068-7da6-4820-a1e6-15f957a46539 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.929326] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1048.929326] env[62204]: value = "task-1200392" [ 1048.929326] env[62204]: _type = "Task" [ 1048.929326] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.941333] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200392, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.022905] env[62204]: DEBUG nova.scheduler.client.report [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1049.027905] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.028187] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.028415] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleting the datastore file [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.028988] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c26582e-ecc2-4e90-88a2-c69e75d3c394 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.036532] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: 
(returnval){ [ 1049.036532] env[62204]: value = "task-1200394" [ 1049.036532] env[62204]: _type = "Task" [ 1049.036532] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.051429] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200394, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.427998] env[62204]: DEBUG nova.network.neutron [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1049.438532] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200392, 'name': ReconfigVM_Task, 'duration_secs': 0.492221} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.438810] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe/16b2b4d0-f6df-4025-b1f5-72e05c1d9abe.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.439661] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5470c5d-5ec1-415c-9778-2c2391b0d965 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.446411] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1049.446411] env[62204]: value = "task-1200395" [ 1049.446411] env[62204]: _type = "Task" [ 1049.446411] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.455205] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200395, 'name': Rename_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.530383] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.740s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1049.535184] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 1.364s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.546599] env[62204]: DEBUG oslo_vmware.api [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200394, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269635} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.547395] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.547395] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1049.547395] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1049.562219] env[62204]: INFO nova.scheduler.client.report [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted allocations for instance 8081d981-42c4-46e4-82e7-2f8b59a68465 [ 1049.567535] env[62204]: INFO nova.scheduler.client.report [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted allocations for instance 3258243e-a9df-4b3e-a6bd-17e3b2168efe [ 1049.608286] env[62204]: DEBUG nova.network.neutron [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Updating instance_info_cache with network_info: [{"id": "61c7f8ce-1fd0-468f-be5b-df93ccf11da1", "address": "fa:16:3e:52:6a:cb", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61c7f8ce-1f", "ovs_interfaceid": "61c7f8ce-1fd0-468f-be5b-df93ccf11da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.811158] env[62204]: DEBUG nova.compute.manager [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1049.835428] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1049.835672] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1049.835832] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1049.836033] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1049.836191] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 
tempest-ServersAaction247Test-807625935-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1049.836342] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1049.836554] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1049.836719] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1049.836887] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1049.837098] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1049.837294] env[62204]: DEBUG nova.virt.hardware [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1049.838183] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f18891-7fb6-4d00-adc0-06199b7d4c8e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.845526] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5100b9-0a2f-4dfe-a5b0-53ec34fb8884 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.858560] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Instance VIF info [] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1049.863876] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Creating folder: Project (afb1fe59e8054ccc999febd125ee73d4). Parent ref: group-v259933. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1049.864156] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b4edbb2-ce13-4130-ac5e-b5d0af2f70b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.873884] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Created folder: Project (afb1fe59e8054ccc999febd125ee73d4) in parent group-v259933. [ 1049.874088] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Creating folder: Instances. Parent ref: group-v260139. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1049.874321] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f540f10-83ec-4bef-8112-ceb280bd8f29 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.884224] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Created folder: Instances in parent group-v260139. [ 1049.884449] env[62204]: DEBUG oslo.service.loopingcall [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1049.884630] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1049.884818] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f89f5a0c-ca4b-49c0-b3f6-740df713777c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.900574] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1049.900574] env[62204]: value = "task-1200398" [ 1049.900574] env[62204]: _type = "Task" [ 1049.900574] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.907342] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200398, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.957757] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200395, 'name': Rename_Task, 'duration_secs': 0.134992} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.958125] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.958422] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-400d7cbb-7674-481c-a608-cfccf58bde23 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.964863] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1049.964863] env[62204]: value = "task-1200399" [ 1049.964863] env[62204]: _type = "Task" [ 1049.964863] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.975384] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200399, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.038401] env[62204]: DEBUG nova.objects.instance [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'migration_context' on Instance uuid a93880fc-e517-4d83-98c1-9ce2405bf9d5 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.069528] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c2764d7-573d-4173-99e9-1063be91c02a tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "8081d981-42c4-46e4-82e7-2f8b59a68465" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.148s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.071273] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.111505] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "refresh_cache-2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.112899] env[62204]: DEBUG nova.compute.manager [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Instance network_info: |[{"id": "61c7f8ce-1fd0-468f-be5b-df93ccf11da1", "address": "fa:16:3e:52:6a:cb", "network": {"id": 
"f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61c7f8ce-1f", "ovs_interfaceid": "61c7f8ce-1fd0-468f-be5b-df93ccf11da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1050.112899] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:6a:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61c7f8ce-1fd0-468f-be5b-df93ccf11da1', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1050.121822] env[62204]: DEBUG oslo.service.loopingcall [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1050.122466] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1050.122689] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a706ac8-2262-4c91-8d6b-64e24be27e4f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.142698] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1050.142698] env[62204]: value = "task-1200400" [ 1050.142698] env[62204]: _type = "Task" [ 1050.142698] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.151820] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200400, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.285047] env[62204]: DEBUG nova.compute.manager [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Received event network-changed-61c7f8ce-1fd0-468f-be5b-df93ccf11da1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1050.285325] env[62204]: DEBUG nova.compute.manager [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Refreshing instance network info cache due to event network-changed-61c7f8ce-1fd0-468f-be5b-df93ccf11da1. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1050.285600] env[62204]: DEBUG oslo_concurrency.lockutils [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] Acquiring lock "refresh_cache-2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.285799] env[62204]: DEBUG oslo_concurrency.lockutils [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] Acquired lock "refresh_cache-2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.286022] env[62204]: DEBUG nova.network.neutron [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Refreshing network info cache for port 61c7f8ce-1fd0-468f-be5b-df93ccf11da1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1050.412811] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200398, 'name': CreateVM_Task, 'duration_secs': 0.288342} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.413055] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1050.413544] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.413748] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.414224] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1050.414613] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1584800d-401f-496f-b905-a4ca6489e1aa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.420108] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1050.420108] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f257a4-7f13-57c8-e500-da0c163a7bdc" [ 1050.420108] env[62204]: _type = "Task" [ 1050.420108] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.430107] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f257a4-7f13-57c8-e500-da0c163a7bdc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.475210] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200399, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.656033] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200400, 'name': CreateVM_Task, 'duration_secs': 0.358601} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.658466] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1050.659284] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.669238] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3f624e-3536-4011-90ab-1a3f386911f1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.676278] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53fa8d0-8f79-468b-b8f5-f35d20710a0d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.707960] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8f772b-a4d5-4414-a208-04d9a272a7c4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.715449] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c17e12-fe4e-4577-b0bd-38e0938589f4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.728313] env[62204]: DEBUG nova.compute.provider_tree [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.931354] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52f257a4-7f13-57c8-e500-da0c163a7bdc, 'name': SearchDatastore_Task, 'duration_secs': 0.011864} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.931661] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1050.931900] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1050.932181] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1050.932340] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.932520] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1050.932794] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.933120] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1050.933351] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7d5cf8d-1c54-4c3f-ae7e-d8e525a60646 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.935066] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c293e279-372f-48e2-a62e-ae01105d0bed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.939610] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 
tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1050.939610] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a6ea80-cba9-e609-5916-0131d4e04bc2" [ 1050.939610] env[62204]: _type = "Task" [ 1050.939610] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.943285] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1050.943461] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1050.944445] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93aada2c-ec62-4c02-aafc-5900d2168640 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.948934] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a6ea80-cba9-e609-5916-0131d4e04bc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.953233] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1050.953233] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52317568-3807-6161-88c2-6515af2e1c6a" [ 1050.953233] env[62204]: _type = "Task" [ 1050.953233] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.960150] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52317568-3807-6161-88c2-6515af2e1c6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.972986] env[62204]: DEBUG oslo_vmware.api [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200399, 'name': PowerOnVM_Task, 'duration_secs': 0.53258} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.973216] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.973417] env[62204]: INFO nova.compute.manager [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Took 7.52 seconds to spawn the instance on the hypervisor. [ 1050.973596] env[62204]: DEBUG nova.compute.manager [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1050.974309] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57605e5c-9947-422c-aa0c-581e447bab4b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.982737] env[62204]: DEBUG nova.network.neutron [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Updated VIF entry in instance network info cache for port 61c7f8ce-1fd0-468f-be5b-df93ccf11da1. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1050.983063] env[62204]: DEBUG nova.network.neutron [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Updating instance_info_cache with network_info: [{"id": "61c7f8ce-1fd0-468f-be5b-df93ccf11da1", "address": "fa:16:3e:52:6a:cb", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61c7f8ce-1f", "ovs_interfaceid": "61c7f8ce-1fd0-468f-be5b-df93ccf11da1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.231625] env[62204]: DEBUG nova.scheduler.client.report [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 
92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1051.311487] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.311598] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.451094] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a6ea80-cba9-e609-5916-0131d4e04bc2, 'name': SearchDatastore_Task, 'duration_secs': 0.016126} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.451492] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.451740] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1051.451946] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.462907] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52317568-3807-6161-88c2-6515af2e1c6a, 'name': SearchDatastore_Task, 'duration_secs': 0.01798} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.463458] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3f14318-7604-4014-88df-ede5e9f99c68 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.468766] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1051.468766] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bab8ac-5d0c-93ee-8f27-9cb2f3408520" [ 1051.468766] env[62204]: _type = "Task" [ 1051.468766] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.476796] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bab8ac-5d0c-93ee-8f27-9cb2f3408520, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.489295] env[62204]: DEBUG oslo_concurrency.lockutils [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] Releasing lock "refresh_cache-2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.489512] env[62204]: DEBUG nova.compute.manager [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received event network-vif-unplugged-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1051.492684] env[62204]: DEBUG oslo_concurrency.lockutils [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] Acquiring lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1051.492684] env[62204]: DEBUG oslo_concurrency.lockutils [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1051.492684] env[62204]: DEBUG oslo_concurrency.lockutils [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1051.492684] env[62204]: DEBUG nova.compute.manager [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] No waiting events found dispatching network-vif-unplugged-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 
{{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1051.492684] env[62204]: WARNING nova.compute.manager [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received unexpected event network-vif-unplugged-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 for instance with vm_state shelved_offloaded and task_state None. [ 1051.492684] env[62204]: DEBUG nova.compute.manager [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received event network-changed-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1051.492684] env[62204]: DEBUG nova.compute.manager [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Refreshing instance network info cache due to event network-changed-59c7be21-51f9-4357-a2e4-24ec0bf0ed20. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1051.492684] env[62204]: DEBUG oslo_concurrency.lockutils [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] Acquiring lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1051.492684] env[62204]: DEBUG oslo_concurrency.lockutils [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] Acquired lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.492684] env[62204]: DEBUG nova.network.neutron [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Refreshing network info cache for port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1051.494672] env[62204]: INFO nova.compute.manager [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Took 12.29 seconds to build instance. [ 1051.813631] env[62204]: DEBUG nova.compute.manager [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1051.979642] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bab8ac-5d0c-93ee-8f27-9cb2f3408520, 'name': SearchDatastore_Task, 'duration_secs': 0.032567} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.979847] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1051.980131] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c/5a3e46f9-ccf9-444e-89e1-6ca46c63d25c.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1051.980423] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.980616] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1051.980836] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1bc60755-a23e-40f3-82b5-7eff1e095388 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.982752] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3cef4c50-ef02-4dbd-92ab-9ade618534ce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.989663] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1051.989663] env[62204]: value = "task-1200401" [ 1051.989663] env[62204]: _type = "Task" [ 1051.989663] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.990800] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1051.990950] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1051.994471] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b387e49-7289-48a5-b9d3-8c17e36cb37e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.998261] env[62204]: DEBUG oslo_concurrency.lockutils [None req-af946821-40ae-47e1-9044-c0d199f38168 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.805s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.001273] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1052.001273] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c0fcc5-cc2c-68a4-7e93-fb1c02c6c3c8" [ 1052.001273] env[62204]: _type = "Task" [ 1052.001273] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.004767] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200401, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.012741] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52c0fcc5-cc2c-68a4-7e93-fb1c02c6c3c8, 'name': SearchDatastore_Task, 'duration_secs': 0.007144} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.013493] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0bbaf44-a0c8-45fe-8098-f0f0ad275880 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.018387] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1052.018387] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52027e9e-988a-caa9-0738-57aa912ddaad" [ 1052.018387] env[62204]: _type = "Task" [ 1052.018387] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.025484] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52027e9e-988a-caa9-0738-57aa912ddaad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.243036] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.708s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.249128] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.178s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.249502] env[62204]: DEBUG nova.objects.instance [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'resources' on Instance uuid 3258243e-a9df-4b3e-a6bd-17e3b2168efe {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.277120] env[62204]: DEBUG nova.network.neutron [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updated VIF entry in instance network info cache for port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1052.277555] env[62204]: DEBUG nova.network.neutron [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating instance_info_cache with network_info: [{"id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "address": "fa:16:3e:52:85:36", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap59c7be21-51", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.346112] env[62204]: DEBUG nova.compute.manager [req-cd42b1d2-528c-4692-ad88-dccd71297325 req-fdd4ab4c-78d0-4c6a-ae56-063e45e0a9f5 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Received event network-changed-9d3af8f6-e075-441b-b191-6617ea2a18a4 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1052.346112] env[62204]: DEBUG nova.compute.manager [req-cd42b1d2-528c-4692-ad88-dccd71297325 
req-fdd4ab4c-78d0-4c6a-ae56-063e45e0a9f5 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Refreshing instance network info cache due to event network-changed-9d3af8f6-e075-441b-b191-6617ea2a18a4. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1052.346112] env[62204]: DEBUG oslo_concurrency.lockutils [req-cd42b1d2-528c-4692-ad88-dccd71297325 req-fdd4ab4c-78d0-4c6a-ae56-063e45e0a9f5 service nova] Acquiring lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.346112] env[62204]: DEBUG oslo_concurrency.lockutils [req-cd42b1d2-528c-4692-ad88-dccd71297325 req-fdd4ab4c-78d0-4c6a-ae56-063e45e0a9f5 service nova] Acquired lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.346112] env[62204]: DEBUG nova.network.neutron [req-cd42b1d2-528c-4692-ad88-dccd71297325 req-fdd4ab4c-78d0-4c6a-ae56-063e45e0a9f5 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Refreshing network info cache for port 9d3af8f6-e075-441b-b191-6617ea2a18a4 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1052.368961] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.371611] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1052.501554] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200401, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453204} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.501797] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c/5a3e46f9-ccf9-444e-89e1-6ca46c63d25c.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1052.502037] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1052.502293] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5046c343-58d9-43b1-b267-d8ec5cee36e5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.509928] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1052.509928] env[62204]: value = "task-1200402" [ 1052.509928] env[62204]: _type = "Task" [ 1052.509928] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.517966] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200402, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.527462] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52027e9e-988a-caa9-0738-57aa912ddaad, 'name': SearchDatastore_Task, 'duration_secs': 0.008299} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.527755] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1052.528072] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06/2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1052.528355] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d8cce9a-1e6e-4e02-910e-eb8f4d71df96 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.533869] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1052.533869] env[62204]: value = "task-1200403" [ 1052.533869] env[62204]: _type = "Task" [ 1052.533869] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.541857] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.756900] env[62204]: DEBUG nova.objects.instance [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'numa_topology' on Instance uuid 3258243e-a9df-4b3e-a6bd-17e3b2168efe {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.780123] env[62204]: DEBUG oslo_concurrency.lockutils [req-5396feab-299a-44ce-b315-69bfd295ecc3 req-3d08b1c3-4479-4066-b66d-9b3a52893075 service nova] Releasing lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.022748] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200402, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066001} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.023093] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1053.023788] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398220a1-a8a1-413f-9604-293d6e427558 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.043842] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c/5a3e46f9-ccf9-444e-89e1-6ca46c63d25c.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1053.047268] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3248a974-46d5-46d8-adf1-a636fa421659 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.066484] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.429885} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.067797] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06/2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1053.068032] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1053.068381] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1053.068381] env[62204]: value = "task-1200404" [ 1053.068381] env[62204]: _type = "Task" [ 1053.068381] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.068579] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-805431cc-7538-4b50-8503-dc3c4633df29 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.078547] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200404, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.079774] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1053.079774] env[62204]: value = "task-1200405" [ 1053.079774] env[62204]: _type = "Task" [ 1053.079774] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.090379] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200405, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.134131] env[62204]: DEBUG nova.network.neutron [req-cd42b1d2-528c-4692-ad88-dccd71297325 req-fdd4ab4c-78d0-4c6a-ae56-063e45e0a9f5 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updated VIF entry in instance network info cache for port 9d3af8f6-e075-441b-b191-6617ea2a18a4. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1053.134552] env[62204]: DEBUG nova.network.neutron [req-cd42b1d2-528c-4692-ad88-dccd71297325 req-fdd4ab4c-78d0-4c6a-ae56-063e45e0a9f5 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [{"id": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "address": "fa:16:3e:26:e0:27", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3af8f6-e0", "ovs_interfaceid": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.265513] env[62204]: DEBUG nova.objects.base [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Object Instance<3258243e-a9df-4b3e-a6bd-17e3b2168efe> lazy-loaded attributes: resources,numa_topology {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1053.385689] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73282d4c-3eec-4def-a474-5d34c8c68348 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.393341] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f2d6c4-6eab-43c7-9a23-a697950790cd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.423994] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68183ecb-3358-4ed2-8d54-9a2e92e2e8b4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.430813] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e665fa-b832-4df1-aa33-5d1c3d05d397 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.443644] env[62204]: DEBUG nova.compute.provider_tree [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.580228] 
env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200404, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.588393] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200405, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121575} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.588661] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1053.589500] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40df3c9-460b-4e00-8be9-8910dae3bf53 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.610769] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06/2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1053.611057] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54c6b827-16b1-46fe-8796-35893737c4c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.629591] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1053.629591] env[62204]: value = "task-1200406" [ 1053.629591] env[62204]: _type = "Task" [ 1053.629591] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.637612] env[62204]: DEBUG oslo_concurrency.lockutils [req-cd42b1d2-528c-4692-ad88-dccd71297325 req-fdd4ab4c-78d0-4c6a-ae56-063e45e0a9f5 service nova] Releasing lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.637983] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200406, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.784660] env[62204]: INFO nova.compute.manager [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Swapping old allocation on dict_keys(['92e8f362-5134-40c6-9a5c-0b8f64197972']) held by migration b845b6d3-628e-4d2f-927c-4121550b3604 for instance [ 1053.808040] env[62204]: DEBUG nova.scheduler.client.report [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Overwriting current allocation {'allocations': {'92e8f362-5134-40c6-9a5c-0b8f64197972': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 142}}, 'project_id': 'd93f6aa3eaad4c5b91b657e75854f45f', 'user_id': '52fc19cbbaf14319a258f952c739c137', 'consumer_generation': 1} on consumer a93880fc-e517-4d83-98c1-9ce2405bf9d5 {{(pid=62204) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1053.881387] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1053.881636] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1053.881824] env[62204]: DEBUG nova.network.neutron [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1053.946661] env[62204]: DEBUG nova.scheduler.client.report [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1054.080782] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200404, 'name': ReconfigVM_Task, 'duration_secs': 0.949308} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.081255] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c/5a3e46f9-ccf9-444e-89e1-6ca46c63d25c.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1054.081670] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-debfd6c8-a1d6-4f7a-bd5e-95fda77436e0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.088306] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1054.088306] env[62204]: value = "task-1200407" [ 1054.088306] env[62204]: _type = "Task" [ 1054.088306] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.098202] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200407, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.139243] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200406, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.347090] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.347387] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.347541] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Starting heal instance info cache {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1054.347716] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Rebuilding the list of instances to heal {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1054.451595] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.202s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.454059] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.085s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.455607] env[62204]: INFO nova.compute.claims [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1054.593870] env[62204]: DEBUG nova.network.neutron [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance_info_cache with network_info: [{"id": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "address": "fa:16:3e:8d:31:3c", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7dcd5c1-45", "ovs_interfaceid": "a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1054.600425] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200407, 'name': Rename_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.639956] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200406, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.853010] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1054.853292] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Skipping network cache update for instance because it is Building. {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 1054.894082] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "refresh_cache-98805916-8501-4afb-9e1c-a5393f6e5557" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.894377] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquired lock "refresh_cache-98805916-8501-4afb-9e1c-a5393f6e5557" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.894544] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Forcefully refreshing network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1054.894800] env[62204]: DEBUG nova.objects.instance [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lazy-loading 'info_cache' on Instance uuid 98805916-8501-4afb-9e1c-a5393f6e5557 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1054.964775] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a4ec7d39-6b42-4aaa-9dc1-ac2681b9f11a tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.533s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.965620] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc 
tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.594s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.965810] env[62204]: INFO nova.compute.manager [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Unshelving [ 1055.099304] env[62204]: DEBUG oslo_concurrency.lockutils [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-a93880fc-e517-4d83-98c1-9ce2405bf9d5" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.099722] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200407, 'name': Rename_Task, 'duration_secs': 0.593918} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.100432] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91128dab-a96e-44af-a744-5c59f078b350 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.102878] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.103096] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d80b48a-5aa6-4aa5-8064-b89dc7602b84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.108011] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ee420c-9367-47fd-a1b7-88dd3e9234b5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.111557] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1055.111557] env[62204]: value = "task-1200408" [ 1055.111557] env[62204]: _type = "Task" [ 1055.111557] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.126860] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200408, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.138206] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200406, 'name': ReconfigVM_Task, 'duration_secs': 1.20026} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.138476] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06/2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.139130] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1fd9eb7f-a903-48a7-8ff6-6ff1674139f3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.144796] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1055.144796] env[62204]: value = "task-1200409" [ 1055.144796] env[62204]: _type = "Task" [ 1055.144796] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.152777] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200409, 'name': Rename_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.563558] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828a5d1b-e2cc-49e2-858b-8ce8e07ee649 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.572416] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807b5f8d-a240-4cd9-a9fc-e7e459a02de9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.602188] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a2c2af-d6f3-4d96-82af-46eb5de9e8d8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.609379] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b5899e-066b-49a4-b4c6-d1c6114a4f21 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.625094] env[62204]: DEBUG nova.compute.provider_tree [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.628893] env[62204]: DEBUG oslo_vmware.api [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200408, 'name': PowerOnVM_Task, 'duration_secs': 0.383282} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.629371] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1055.629581] env[62204]: INFO nova.compute.manager [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Took 5.82 seconds to spawn the instance on the hypervisor. [ 1055.629763] env[62204]: DEBUG nova.compute.manager [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1055.630492] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8753967-9f27-4b09-8474-9c324d0330a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.653272] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200409, 'name': Rename_Task, 'duration_secs': 0.127485} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.653522] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.653741] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-745dc43f-db01-46f6-a3c2-5cab3f234a06 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.660778] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1055.660778] env[62204]: value = "task-1200410" [ 1055.660778] env[62204]: _type = "Task" [ 1055.660778] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.667798] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200410, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.987278] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.130466] env[62204]: DEBUG nova.scheduler.client.report [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1056.145176] env[62204]: INFO nova.compute.manager [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Took 14.13 seconds to build instance. [ 1056.172033] env[62204]: DEBUG oslo_vmware.api [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200410, 'name': PowerOnVM_Task, 'duration_secs': 0.455089} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.172033] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1056.172275] env[62204]: INFO nova.compute.manager [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Took 8.62 seconds to spawn the instance on the hypervisor. [ 1056.172485] env[62204]: DEBUG nova.compute.manager [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1056.173337] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0818f747-2f2e-45c4-a025-ee59a6da1737 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.197075] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.197075] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf15fe72-36b1-4cea-a8e7-04b9f03da58f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.203111] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1056.203111] env[62204]: value = "task-1200411" [ 1056.203111] env[62204]: _type = "Task" [ 1056.203111] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.210485] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200411, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.636601] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.182s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.637216] env[62204]: DEBUG nova.compute.manager [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1056.639794] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.653s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1056.640018] env[62204]: DEBUG nova.objects.instance [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'pci_requests' on Instance uuid 3258243e-a9df-4b3e-a6bd-17e3b2168efe {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1056.646903] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2c8dbfae-cf90-4674-bbfe-5dd4b4f3c8c6 tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock "5a3e46f9-ccf9-444e-89e1-6ca46c63d25c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.640s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1056.688757] env[62204]: INFO nova.compute.manager [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Took 15.11 seconds to build instance. [ 1056.713753] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200411, 'name': PowerOffVM_Task, 'duration_secs': 0.200171} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.714202] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.714923] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1056.715184] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1056.715352] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.715540] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1056.715688] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.715839] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1056.716060] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1056.716233] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 
tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1056.716437] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1056.716613] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1056.716789] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1056.721967] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2efe88a3-b989-43dd-b0c1-f7c667bc0ded {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.737160] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1056.737160] env[62204]: value = "task-1200412" [ 1056.737160] env[62204]: _type = "Task" [ 1056.737160] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.745449] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200412, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.843127] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Updating instance_info_cache with network_info: [{"id": "aae4d007-4d74-4c2c-9d2c-6803c004abe1", "address": "fa:16:3e:c2:f3:e1", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaae4d007-4d", "ovs_interfaceid": "aae4d007-4d74-4c2c-9d2c-6803c004abe1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.945608] env[62204]: DEBUG nova.compute.manager [None req-c7abcd82-0a38-4014-ba78-3a3c285eff7a tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1056.946483] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806d1210-a69e-4698-b2d6-52cad32a3430 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.007821] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquiring lock "5a3e46f9-ccf9-444e-89e1-6ca46c63d25c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.008131] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock "5a3e46f9-ccf9-444e-89e1-6ca46c63d25c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.008369] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquiring lock "5a3e46f9-ccf9-444e-89e1-6ca46c63d25c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1057.008897] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock "5a3e46f9-ccf9-444e-89e1-6ca46c63d25c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.009177] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock "5a3e46f9-ccf9-444e-89e1-6ca46c63d25c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.011175] env[62204]: INFO nova.compute.manager [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Terminating instance [ 1057.012768] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquiring lock "refresh_cache-5a3e46f9-ccf9-444e-89e1-6ca46c63d25c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.012928] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquired lock "refresh_cache-5a3e46f9-ccf9-444e-89e1-6ca46c63d25c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.013111] env[62204]: DEBUG nova.network.neutron [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1057.143358] env[62204]: DEBUG nova.compute.utils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1057.146107] env[62204]: DEBUG nova.objects.instance [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'numa_topology' on Instance uuid 3258243e-a9df-4b3e-a6bd-17e3b2168efe {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.147445] env[62204]: DEBUG nova.compute.manager [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1057.147445] env[62204]: DEBUG nova.network.neutron [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1057.191178] env[62204]: DEBUG oslo_concurrency.lockutils [None req-4f34c39e-0904-4839-9651-5e28efb7c489 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.621s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.219812] env[62204]: DEBUG nova.policy [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6054f141cad7421f85bbb5944f408070', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6907df6f17b142c0b4881f15f3b88a9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1057.247199] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200412, 'name': ReconfigVM_Task, 'duration_secs': 0.147165} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.248014] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae695b8a-567d-4f6e-ad82-bee5148b55c1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.268438] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1057.268698] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1057.268890] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.269054] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1057.269247] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.269380] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1057.269609] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1057.269774] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1057.269943] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1057.270124] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1057.270305] env[62204]: DEBUG nova.virt.hardware [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1057.271142] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43336210-1473-4527-abe1-28d7d9542249 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.276336] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1057.276336] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524964dc-3c05-b763-3fe9-7ba541b9866b" [ 1057.276336] env[62204]: _type = "Task" [ 1057.276336] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.285091] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524964dc-3c05-b763-3fe9-7ba541b9866b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.346027] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Releasing lock "refresh_cache-98805916-8501-4afb-9e1c-a5393f6e5557" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.346027] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Updated the network info_cache for instance {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1057.346027] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.346258] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.346258] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.346386] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.346539] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.346686] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.346819] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62204) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1057.346967] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.457849] env[62204]: INFO nova.compute.manager [None req-c7abcd82-0a38-4014-ba78-3a3c285eff7a tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] instance snapshotting [ 1057.457849] env[62204]: DEBUG nova.objects.instance [None req-c7abcd82-0a38-4014-ba78-3a3c285eff7a tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lazy-loading 'flavor' on Instance uuid 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.533922] env[62204]: DEBUG nova.network.neutron [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1057.591522] env[62204]: DEBUG nova.network.neutron [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.612141] env[62204]: DEBUG nova.network.neutron [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Successfully created port: 34d2b53b-7f04-471d-b817-7fa154770046 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1057.648185] env[62204]: DEBUG nova.compute.manager [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1057.654073] env[62204]: INFO nova.compute.claims [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.791364] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524964dc-3c05-b763-3fe9-7ba541b9866b, 'name': SearchDatastore_Task, 'duration_secs': 0.00738} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.797607] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1057.797930] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7209936-4c1d-4fb1-a568-b180d06819c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.821027] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1057.821027] env[62204]: value = "task-1200413" [ 1057.821027] env[62204]: _type = "Task" [ 1057.821027] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.832738] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200413, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.849927] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.964826] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89576dc-4a62-4a8a-808b-ddcc7e9edea7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.986564] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1346b915-4f10-4a06-84ae-1e8cfec7ba5d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.094240] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Releasing lock "refresh_cache-5a3e46f9-ccf9-444e-89e1-6ca46c63d25c" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.094784] env[62204]: DEBUG nova.compute.manager [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1058.095057] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.096052] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03ab877-17b2-424c-a719-2fde07374f4d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.104440] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.104737] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-692992e7-9ca2-4725-86bd-d18d47209dfe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.111390] env[62204]: DEBUG oslo_vmware.api [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1058.111390] env[62204]: value = "task-1200414" [ 1058.111390] env[62204]: _type = "Task" [ 1058.111390] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.120372] env[62204]: DEBUG oslo_vmware.api [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200414, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.237628] env[62204]: DEBUG oslo_concurrency.lockutils [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.237908] env[62204]: DEBUG oslo_concurrency.lockutils [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.238153] env[62204]: DEBUG oslo_concurrency.lockutils [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1058.238360] env[62204]: DEBUG oslo_concurrency.lockutils [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1058.238537] env[62204]: DEBUG oslo_concurrency.lockutils [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1058.241267] env[62204]: INFO nova.compute.manager [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Terminating instance [ 1058.242914] env[62204]: DEBUG nova.compute.manager [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1058.243141] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.244010] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4ac596-ef86-4390-b2d0-f2cff3b89da5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.252512] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.252773] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-426bb25c-38ab-4c45-a2ca-65f54ab4bd40 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.260620] env[62204]: DEBUG oslo_vmware.api [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1058.260620] env[62204]: value = "task-1200415" [ 1058.260620] env[62204]: _type = "Task" [ 1058.260620] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.270394] env[62204]: DEBUG oslo_vmware.api [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.331074] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200413, 'name': ReconfigVM_Task, 'duration_secs': 0.220454} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.331492] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1058.332373] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe22227-9cfb-4610-a30e-425756df2cae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.360750] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] a93880fc-e517-4d83-98c1-9ce2405bf9d5/a93880fc-e517-4d83-98c1-9ce2405bf9d5.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1058.361248] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08bb41cf-b7d1-44a7-9c5e-7fc77ff55c2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.380246] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1058.380246] env[62204]: value = "task-1200416" [ 1058.380246] env[62204]: _type = "Task" [ 1058.380246] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.388438] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200416, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.498799] env[62204]: DEBUG nova.compute.manager [None req-c7abcd82-0a38-4014-ba78-3a3c285eff7a tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Instance disappeared during snapshot {{(pid=62204) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 1058.617586] env[62204]: DEBUG nova.compute.manager [None req-c7abcd82-0a38-4014-ba78-3a3c285eff7a tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Found 0 images (rotation: 2) {{(pid=62204) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 1058.625036] env[62204]: DEBUG oslo_vmware.api [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200414, 'name': PowerOffVM_Task, 'duration_secs': 0.366689} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.625389] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.625630] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1058.625948] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a548a84-57b9-429b-b11d-1cea5b1e19c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.648890] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1058.649332] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1058.649563] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Deleting the datastore file [datastore1] 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1058.650094] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b450ef0e-ca55-4333-a277-dc66dc310b95 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.659386] env[62204]: DEBUG oslo_vmware.api [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for the task: (returnval){ [ 1058.659386] env[62204]: value = "task-1200418" [ 1058.659386] env[62204]: _type = "Task" [ 1058.659386] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.667346] env[62204]: DEBUG nova.compute.manager [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Start spawning the instance on the hypervisor. 
{{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1058.671628] env[62204]: DEBUG oslo_vmware.api [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.693925] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1058.694191] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1058.694354] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1058.694540] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1058.694690] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1058.694851] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1058.695093] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1058.695282] env[62204]: 
DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1058.695455] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1058.695617] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1058.695788] env[62204]: DEBUG nova.virt.hardware [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1058.697056] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b122ac-053a-4d18-a758-3b4ba8ba704d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.704217] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff8a82d5-3d66-476c-83ec-20ad798c9b6b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.770263] env[62204]: DEBUG oslo_vmware.api [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200415, 'name': PowerOffVM_Task, 'duration_secs': 0.229167} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.772907] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.773107] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1058.773544] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6c15d7b-d256-45da-b315-b49bf2d2c234 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.794402] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d14c51-7cfd-4c0b-b253-129b3d5f444b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.801700] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f91a916-b84d-48d6-8b39-ac5d506da029 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.831910] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d03a4cb-307d-401e-880f-78eb8673c2db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.837935] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1058.838152] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1058.838341] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleting the datastore file [datastore1] 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1058.838584] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21361a3f-bba4-4368-8fe2-3927a996debb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.843489] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ec0836-0a30-41a9-ac52-db989e0b8fc5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.848291] env[62204]: DEBUG oslo_vmware.api [None 
req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1058.848291] env[62204]: value = "task-1200420" [ 1058.848291] env[62204]: _type = "Task" [ 1058.848291] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.860383] env[62204]: DEBUG nova.compute.provider_tree [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.865661] env[62204]: DEBUG oslo_vmware.api [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200420, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.888780] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200416, 'name': ReconfigVM_Task, 'duration_secs': 0.300836} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.889047] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfigured VM instance instance-00000060 to attach disk [datastore2] a93880fc-e517-4d83-98c1-9ce2405bf9d5/a93880fc-e517-4d83-98c1-9ce2405bf9d5.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.889836] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711ecb56-1233-4346-939d-f0f868d15f4e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.909910] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf110a3-f92f-4298-86b4-6948ef45f4dd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.930306] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3fa860f-151d-4df1-a1a2-22a436ae161a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.950801] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bfbc4a-9a3d-495f-96f0-31d649b90561 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.957672] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1058.957923] 
env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9fe0983-2fe3-465c-826a-2d3f12b1a8ad {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.964490] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1058.964490] env[62204]: value = "task-1200421" [ 1058.964490] env[62204]: _type = "Task" [ 1058.964490] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.971773] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200421, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.168913] env[62204]: DEBUG oslo_vmware.api [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Task: {'id': task-1200418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116376} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.169410] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.169712] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.170015] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.170403] env[62204]: INFO nova.compute.manager [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1059.170761] env[62204]: DEBUG oslo.service.loopingcall [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1059.171079] env[62204]: DEBUG nova.compute.manager [-] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1059.171270] env[62204]: DEBUG nova.network.neutron [-] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1059.175396] env[62204]: DEBUG nova.compute.manager [req-75ceef2e-d2cd-4504-abbb-e11ee056552d req-f17ef051-3d48-4922-91bb-46169cf3ebe6 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Received event network-vif-plugged-34d2b53b-7f04-471d-b817-7fa154770046 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1059.175728] env[62204]: DEBUG oslo_concurrency.lockutils [req-75ceef2e-d2cd-4504-abbb-e11ee056552d req-f17ef051-3d48-4922-91bb-46169cf3ebe6 service nova] Acquiring lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.176063] env[62204]: DEBUG oslo_concurrency.lockutils [req-75ceef2e-d2cd-4504-abbb-e11ee056552d req-f17ef051-3d48-4922-91bb-46169cf3ebe6 service nova] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.176369] env[62204]: DEBUG oslo_concurrency.lockutils [req-75ceef2e-d2cd-4504-abbb-e11ee056552d req-f17ef051-3d48-4922-91bb-46169cf3ebe6 service nova] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.176673] env[62204]: DEBUG nova.compute.manager [req-75ceef2e-d2cd-4504-abbb-e11ee056552d req-f17ef051-3d48-4922-91bb-46169cf3ebe6 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] No waiting events found dispatching network-vif-plugged-34d2b53b-7f04-471d-b817-7fa154770046 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1059.176972] env[62204]: WARNING nova.compute.manager [req-75ceef2e-d2cd-4504-abbb-e11ee056552d req-f17ef051-3d48-4922-91bb-46169cf3ebe6 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Received unexpected event network-vif-plugged-34d2b53b-7f04-471d-b817-7fa154770046 for instance with vm_state building and task_state spawning. [ 1059.191411] env[62204]: DEBUG nova.network.neutron [-] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1059.260212] env[62204]: DEBUG nova.network.neutron [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Successfully updated port: 34d2b53b-7f04-471d-b817-7fa154770046 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1059.359627] env[62204]: DEBUG oslo_vmware.api [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22007} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.359899] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.360132] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.360336] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.360536] env[62204]: INFO nova.compute.manager [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1059.360807] env[62204]: DEBUG oslo.service.loopingcall [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1059.361038] env[62204]: DEBUG nova.compute.manager [-] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1059.361145] env[62204]: DEBUG nova.network.neutron [-] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1059.363398] env[62204]: DEBUG nova.scheduler.client.report [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1059.475202] env[62204]: DEBUG oslo_vmware.api [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200421, 'name': PowerOnVM_Task, 'duration_secs': 0.386557} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.475534] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1059.693620] env[62204]: DEBUG nova.network.neutron [-] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.762924] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1059.763084] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.763267] env[62204]: DEBUG nova.network.neutron [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1059.868781] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc 
tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.229s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.871033] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.021s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.871231] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1059.871391] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62204) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1059.872268] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb9e653-74bb-49b2-af04-44494b61e9de {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.881018] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865bacb3-4429-472d-a78e-d2dc0245ed86 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.894602] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633e859a-7a96-4ee8-af41-e3e9abad967f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.901136] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b331143a-33b9-4c51-8726-9e3effd89101 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.930345] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179249MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62204) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1059.930523] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1059.930730] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1059.935076] env[62204]: INFO 
nova.network.neutron [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1060.180333] env[62204]: DEBUG nova.network.neutron [-] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.196281] env[62204]: INFO nova.compute.manager [-] [instance: 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c] Took 1.02 seconds to deallocate network for instance. [ 1060.450135] env[62204]: DEBUG nova.network.neutron [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1060.521577] env[62204]: INFO nova.compute.manager [None req-5646030a-5fcc-4de9-b667-94575cd9d852 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance to original state: 'active' [ 1060.574441] env[62204]: DEBUG nova.network.neutron [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance_info_cache with network_info: [{"id": "34d2b53b-7f04-471d-b817-7fa154770046", "address": "fa:16:3e:ec:e1:09", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34d2b53b-7f", "ovs_interfaceid": "34d2b53b-7f04-471d-b817-7fa154770046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.682084] env[62204]: INFO nova.compute.manager [-] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Took 1.32 seconds to deallocate network for instance. 
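[editor's note] The oslo_concurrency.lockutils entries throughout this section record, for each named lock, how long the caller waited to acquire it and how long it was then held (for example "compute_resources" held 3.229s by instance_claim while clean_compute_node_cache waited 2.021s). A minimal stdlib-only sketch of that acquire/wait/held accounting is below; the timed_lock helper is hypothetical and is not the oslo.concurrency implementation, it only reproduces the logging pattern seen here.

```python
# Sketch of the "waited X s / held Y s" accounting that lockutils logs around
# named locks. Hypothetical helper, not the oslo.concurrency implementation.
import threading
import time
from contextlib import contextmanager

_locks = {}                      # lock name -> threading.Lock
_registry_guard = threading.Lock()

@contextmanager
def timed_lock(name, caller):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

if __name__ == "__main__":
    def audit():
        with timed_lock("compute_resources", "update_available_resource"):
            time.sleep(0.2)      # stand-in for the resource audit

    def claim():
        with timed_lock("compute_resources", "instance_claim"):
            time.sleep(0.1)      # stand-in for claiming resources

    t = threading.Thread(target=audit)
    t.start()
    time.sleep(0.01)
    claim()                      # blocks until the audit releases the lock
    t.join()
```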
[ 1060.702076] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1060.966054] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 98805916-8501-4afb-9e1c-a5393f6e5557 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.966295] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.966474] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance a93880fc-e517-4d83-98c1-9ce2405bf9d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.966632] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.966782] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.966930] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.967106] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 27ecfd31-6c25-436b-a2fa-27a40f1b0f36 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.967242] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 3258243e-a9df-4b3e-a6bd-17e3b2168efe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1060.967438] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1060.967576] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1061.077449] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1061.077775] env[62204]: DEBUG nova.compute.manager [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Instance network_info: |[{"id": "34d2b53b-7f04-471d-b817-7fa154770046", "address": "fa:16:3e:ec:e1:09", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34d2b53b-7f", "ovs_interfaceid": "34d2b53b-7f04-471d-b817-7fa154770046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1061.078245] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:e1:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4105fae7-3de1-4a6c-a44b-39097c174699', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34d2b53b-7f04-471d-b817-7fa154770046', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1061.086582] env[62204]: DEBUG oslo.service.loopingcall [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1061.089334] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1061.089767] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8709ec87-0749-4f55-90a2-c58bca434988 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.111636] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1061.111636] env[62204]: value = "task-1200422" [ 1061.111636] env[62204]: _type = "Task" [ 1061.111636] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.121305] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200422, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.156511] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9a4a59-69e7-40ed-a7a8-120f65a75a50 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.163743] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4ba5ab-70a1-4cbf-b1a1-5cfe6383541a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.196602] env[62204]: DEBUG oslo_concurrency.lockutils [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.197656] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a26fa201-82ae-483f-9b9b-d5188c8c7b98 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.205805] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c64c250-9e2c-4fd7-aa30-f26d6f865d0b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.219223] env[62204]: DEBUG nova.compute.provider_tree [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.237275] env[62204]: DEBUG nova.compute.manager [req-e2850bc0-ecfc-46d8-bf85-61073598b869 req-089eac7a-c0d7-4405-a2e0-c54a4b888e77 service nova] [instance: 
27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Received event network-changed-34d2b53b-7f04-471d-b817-7fa154770046 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1061.237554] env[62204]: DEBUG nova.compute.manager [req-e2850bc0-ecfc-46d8-bf85-61073598b869 req-089eac7a-c0d7-4405-a2e0-c54a4b888e77 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Refreshing instance network info cache due to event network-changed-34d2b53b-7f04-471d-b817-7fa154770046. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1061.237834] env[62204]: DEBUG oslo_concurrency.lockutils [req-e2850bc0-ecfc-46d8-bf85-61073598b869 req-089eac7a-c0d7-4405-a2e0-c54a4b888e77 service nova] Acquiring lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.237997] env[62204]: DEBUG oslo_concurrency.lockutils [req-e2850bc0-ecfc-46d8-bf85-61073598b869 req-089eac7a-c0d7-4405-a2e0-c54a4b888e77 service nova] Acquired lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.238258] env[62204]: DEBUG nova.network.neutron [req-e2850bc0-ecfc-46d8-bf85-61073598b869 req-089eac7a-c0d7-4405-a2e0-c54a4b888e77 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Refreshing network info cache for port 34d2b53b-7f04-471d-b817-7fa154770046 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1061.545750] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.548795] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.548795] env[62204]: DEBUG nova.network.neutron [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1061.622014] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200422, 'name': CreateVM_Task, 'duration_secs': 0.308294} completed successfully. 
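[editor's note] The "Final resource view" reported in the audit above (used_ram=2048MB, used_disk=8GB, used_vcpus=8) is consistent with the eight per-instance placement allocations of {DISK_GB: 1, MEMORY_MB: 192, VCPU: 1} plus the 512 MB reserved in the provider inventory also logged in this section. The summation below is only an illustrative check of that arithmetic, not the resource tracker's actual aggregation path.

```python
# Recompute the tracker's "Final resource view" totals from the eight
# per-instance allocations listed in this audit. Illustrative check only.
allocations = [{"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}] * 8
reserved_ram_mb = 512            # MEMORY_MB 'reserved' from the inventory data

used_ram_mb = reserved_ram_mb + sum(a["MEMORY_MB"] for a in allocations)
used_disk_gb = sum(a["DISK_GB"] for a in allocations)
used_vcpus = sum(a["VCPU"] for a in allocations)

print(used_ram_mb, used_disk_gb, used_vcpus)   # 2048 8 8, matching the log
```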
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.622718] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1061.623164] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1061.623440] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.623838] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1061.624168] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e35a287-9403-4cf0-9884-d91f6e314e87 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.628705] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1061.628705] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524411d9-01d0-e854-a713-9661b825291d" [ 1061.628705] env[62204]: _type = "Task" [ 1061.628705] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.636188] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524411d9-01d0-e854-a713-9661b825291d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.722447] env[62204]: DEBUG nova.scheduler.client.report [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1061.885780] env[62204]: DEBUG oslo_concurrency.lockutils [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.886452] env[62204]: DEBUG oslo_concurrency.lockutils [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.886732] env[62204]: DEBUG oslo_concurrency.lockutils [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1061.886978] env[62204]: DEBUG oslo_concurrency.lockutils [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1061.887318] env[62204]: DEBUG oslo_concurrency.lockutils [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.889440] env[62204]: INFO nova.compute.manager [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Terminating instance [ 1061.891467] env[62204]: DEBUG nova.compute.manager [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 
tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1061.891723] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1061.892016] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54a9faab-2bc1-4431-9682-e41372b74dcf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.898976] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1061.898976] env[62204]: value = "task-1200423" [ 1061.898976] env[62204]: _type = "Task" [ 1061.898976] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.909779] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.074383] env[62204]: DEBUG nova.network.neutron [req-e2850bc0-ecfc-46d8-bf85-61073598b869 req-089eac7a-c0d7-4405-a2e0-c54a4b888e77 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updated VIF entry in instance network info cache for port 34d2b53b-7f04-471d-b817-7fa154770046. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1062.074813] env[62204]: DEBUG nova.network.neutron [req-e2850bc0-ecfc-46d8-bf85-61073598b869 req-089eac7a-c0d7-4405-a2e0-c54a4b888e77 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance_info_cache with network_info: [{"id": "34d2b53b-7f04-471d-b817-7fa154770046", "address": "fa:16:3e:ec:e1:09", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34d2b53b-7f", "ovs_interfaceid": "34d2b53b-7f04-471d-b817-7fa154770046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.138535] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524411d9-01d0-e854-a713-9661b825291d, 'name': SearchDatastore_Task, 'duration_secs': 0.011477} completed successfully. 
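[editor's note] The instance_info_cache updates above store each port's network_info as a list of VIF dicts. The sketch below pulls the fields most often needed when reading these entries (MAC, fixed IP, MTU, segmentation ID, OVS devname) out of a trimmed copy of the cached entry for port 34d2b53b-7f04-471d-b817-7fa154770046; the structure and values are copied from the log entry, while summarize_vif is just an illustrative helper.

```python
# Trimmed copy of the network_info entry cached for port
# 34d2b53b-7f04-471d-b817-7fa154770046 (fields not used here are omitted).
vif = {
    "id": "34d2b53b-7f04-471d-b817-7fa154770046",
    "address": "fa:16:3e:ec:e1:09",
    "type": "ovs",
    "devname": "tap34d2b53b-7f",
    "network": {
        "id": "569f9264-e88a-4d94-b82a-9aaf9823803a",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950},
    },
    "details": {
        "segmentation_id": 773,
        "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699",
    },
}

def summarize_vif(vif):
    """Illustrative helper: flatten the fields usually needed for debugging."""
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ips": fixed_ips,
        "mtu": vif["network"]["meta"]["mtu"],
        "segmentation_id": vif["details"]["segmentation_id"],
        "devname": vif["devname"],
    }

print(summarize_vif(vif))
# {'port_id': '34d2b53b-...', 'mac': 'fa:16:3e:ec:e1:09',
#  'fixed_ips': ['192.168.128.13'], 'mtu': 8950,
#  'segmentation_id': 773, 'devname': 'tap34d2b53b-7f'}
```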
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.138846] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.139099] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1062.139352] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1062.139534] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.139729] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1062.139979] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18a93d44-fce9-4250-b83a-9c009d65dc26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.147433] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1062.147607] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1062.150174] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30b37c45-7413-41dc-838f-a541ad4d253f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.157202] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1062.157202] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e587c7-8453-dc16-00f7-bcf64f23baaa" [ 1062.157202] env[62204]: _type = "Task" [ 1062.157202] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.164351] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e587c7-8453-dc16-00f7-bcf64f23baaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.227265] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1062.227467] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.297s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1062.227723] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.526s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1062.227941] env[62204]: DEBUG nova.objects.instance [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lazy-loading 'resources' on Instance uuid 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.228975] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1062.229146] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Cleaning up deleted instances {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1062.264344] env[62204]: DEBUG nova.network.neutron [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] 
Updating instance_info_cache with network_info: [{"id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "address": "fa:16:3e:52:85:36", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c7be21-51", "ovs_interfaceid": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.410878] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200423, 'name': PowerOffVM_Task, 'duration_secs': 0.191764} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.411165] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1062.411373] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Volume detach. 
Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1062.411567] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260131', 'volume_id': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'name': 'volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'a93880fc-e517-4d83-98c1-9ce2405bf9d5', 'attached_at': '2024-10-08T23:45:26.000000', 'detached_at': '', 'volume_id': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'serial': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1062.412339] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2d868e-ef0b-4498-8a7a-71126850a274 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.432481] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f50623-9837-4ee5-97a8-7030b80b3034 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.438363] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89aa7c13-f37a-40d3-8e75-1baadec513e2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.457527] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1462e9-5ed1-4af5-855d-791bfc262067 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.470982] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] The volume has not been displaced from its original location: [datastore1] volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5/volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5.vmdk. No consolidation needed. 
{{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1062.475985] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfiguring VM instance instance-00000060 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1062.476244] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1a780bf-3150-489a-9902-7d02158b7c2f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.492692] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1062.492692] env[62204]: value = "task-1200424" [ 1062.492692] env[62204]: _type = "Task" [ 1062.492692] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.499645] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200424, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.577598] env[62204]: DEBUG oslo_concurrency.lockutils [req-e2850bc0-ecfc-46d8-bf85-61073598b869 req-089eac7a-c0d7-4405-a2e0-c54a4b888e77 service nova] Releasing lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.578062] env[62204]: DEBUG nova.compute.manager [req-e2850bc0-ecfc-46d8-bf85-61073598b869 req-089eac7a-c0d7-4405-a2e0-c54a4b888e77 service nova] [instance: 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06] Received event network-vif-deleted-61c7f8ce-1fd0-468f-be5b-df93ccf11da1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1062.669689] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e587c7-8453-dc16-00f7-bcf64f23baaa, 'name': SearchDatastore_Task, 'duration_secs': 0.008123} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.670487] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eeb9fdf-d8bf-43b0-8e80-51bafe39411b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.675265] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1062.675265] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5287f204-06eb-6c44-9981-3f40c7c1e4e4" [ 1062.675265] env[62204]: _type = "Task" [ 1062.675265] env[62204]: } to complete. 
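[editor's note] The vCenter task handles in this section (CreateVM_Task, PowerOffVM_Task, ReconfigVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task) all follow the same pattern: the API issues the call, then "Waiting for the task" is followed by periodic "_poll_task ... progress is N%" entries until "completed successfully". The sketch below is a generic poll-until-done loop mirroring that pattern; get_task_state() is a hypothetical stand-in for the real vCenter task query, and this is not the oslo.vmware implementation.

```python
# Generic poll-until-done loop mirroring the "_poll_task ... progress is N%"
# entries in this log. get_task_state() is a hypothetical stand-in.
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_state, interval=0.5, timeout=300):
    """Poll get_task_state() until success, error, or timeout.

    get_task_state() must return a (state, progress) tuple where state is
    one of "queued", "running", "success", "error".
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = get_task_state()
        print(f"progress is {progress}%")
        if state == "success":
            print("completed successfully")
            return
        if state == "error":
            raise TaskFailed("task reported an error")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")

if __name__ == "__main__":
    # Fake task that finishes on the third poll.
    states = iter([("running", 0), ("running", 40), ("success", 100)])
    wait_for_task(lambda: next(states), interval=0.01)
```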
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.683416] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5287f204-06eb-6c44-9981-3f40c7c1e4e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.747593] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] There are 55 instances to clean {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1062.747870] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 8081d981-42c4-46e4-82e7-2f8b59a68465] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1062.769435] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1062.794176] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='cb25682dcf0ff84dfe83ffeef0f3b1c9',container_format='bare',created_at=2024-10-08T23:45:02Z,direct_url=,disk_format='vmdk',id=7f840d5f-1f8a-4e6d-95b3-265fb5a5d126,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-159491708-shelved',owner='8108a8f6b5e04832aab188333bad1e0e',properties=ImageMetaProps,protected=,size=31662592,status='active',tags=,updated_at=2024-10-08T23:45:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1062.794497] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1062.794613] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1062.794787] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1062.794945] env[62204]: DEBUG nova.virt.hardware [None 
req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1062.795166] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1062.795391] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1062.795558] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1062.795725] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1062.795889] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1062.796076] env[62204]: DEBUG nova.virt.hardware [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1062.797219] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0cae28-5abe-4ad7-9bb8-3d3854fbb23c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.805389] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc6a9e4-b5b5-4a64-ad71-6a8d2a4756f2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.820791] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:85:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59c7be21-51f9-4357-a2e4-24ec0bf0ed20', 
'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.828133] env[62204]: DEBUG oslo.service.loopingcall [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1062.830332] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.830695] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d46e433d-d67e-493b-a29d-3b5542cd1892 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.850050] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.850050] env[62204]: value = "task-1200425" [ 1062.850050] env[62204]: _type = "Task" [ 1062.850050] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.856229] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200425, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.983070] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7863aa16-f48d-40b6-92f5-e67f98c4f273 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.989539] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84312994-5d89-4d5d-9fe4-191407ddb86a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.022044] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fdd7f9-76e5-426d-83b2-19d2f871dcc8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.027584] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200424, 'name': ReconfigVM_Task, 'duration_secs': 0.199235} completed successfully. 
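[editor's note] The nova.virt.hardware lines above walk the CPU topology selection for the m1.nano flavor: with 1 vCPU and sockets/cores/threads each limited only by 65536, the single admissible topology is cores=1, sockets=1, threads=1. The brute-force enumeration below illustrates why a 1-vCPU flavor can only yield (1, 1, 1); it is an assumption-level sketch of the idea, not nova's _get_possible_cpu_topologies.

```python
# Enumerate (sockets, cores, threads) combinations whose product equals the
# vCPU count, capped by the maximums seen in the log (65536 each).
# Illustrative only; nova's hardware.py applies more constraints than this.
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    return [
        (s, c, t)
        for s, c, t in product(divisors, repeat=3)
        if s * c * t == vcpus
        and s <= max_sockets and c <= max_cores and t <= max_threads
    ]

print(possible_topologies(1))   # [(1, 1, 1)], matching the "Possible topologies" entry
print(possible_topologies(4))   # e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
```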
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.028296] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Reconfigured VM instance instance-00000060 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1063.035075] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-301f05cb-c5a3-4e05-861b-8f0dfb1a0f9d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.049325] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19f9572-5d91-4f06-b504-f33b735b7c88 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.062945] env[62204]: DEBUG nova.compute.provider_tree [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1063.065527] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1063.065527] env[62204]: value = "task-1200426" [ 1063.065527] env[62204]: _type = "Task" [ 1063.065527] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.073688] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200426, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.186604] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5287f204-06eb-6c44-9981-3f40c7c1e4e4, 'name': SearchDatastore_Task, 'duration_secs': 0.00844} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.186920] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.187259] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 27ecfd31-6c25-436b-a2fa-27a40f1b0f36/27ecfd31-6c25-436b-a2fa-27a40f1b0f36.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1063.187533] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-783a55d2-44d8-4b60-8e57-70e64bf85d0b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.194245] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1063.194245] env[62204]: value = "task-1200427" [ 1063.194245] env[62204]: _type = "Task" [ 1063.194245] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.201920] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200427, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.250988] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: d3be85d1-34b6-4b00-9740-c3abdb4b0734] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1063.263464] env[62204]: DEBUG nova.compute.manager [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received event network-vif-plugged-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1063.263690] env[62204]: DEBUG oslo_concurrency.lockutils [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] Acquiring lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.263891] env[62204]: DEBUG oslo_concurrency.lockutils [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.264076] env[62204]: DEBUG oslo_concurrency.lockutils [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.264278] env[62204]: DEBUG nova.compute.manager [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] No waiting events found dispatching network-vif-plugged-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1063.264450] env[62204]: WARNING nova.compute.manager [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received unexpected event network-vif-plugged-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 for instance with vm_state shelved_offloaded and task_state spawning. [ 1063.264609] env[62204]: DEBUG nova.compute.manager [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received event network-changed-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1063.264763] env[62204]: DEBUG nova.compute.manager [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Refreshing instance network info cache due to event network-changed-59c7be21-51f9-4357-a2e4-24ec0bf0ed20. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1063.264955] env[62204]: DEBUG oslo_concurrency.lockutils [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] Acquiring lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.265144] env[62204]: DEBUG oslo_concurrency.lockutils [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] Acquired lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.265311] env[62204]: DEBUG nova.network.neutron [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Refreshing network info cache for port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1063.361883] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200425, 'name': CreateVM_Task, 'duration_secs': 0.291761} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.362243] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1063.363065] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.363253] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.363671] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1063.363945] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65f99087-cdf2-4678-89b9-4506aeb02637 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.371653] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1063.371653] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52407be9-57ad-acc3-bf07-a5f311f15618" [ 1063.371653] env[62204]: _type = "Task" [ 1063.371653] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.382617] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52407be9-57ad-acc3-bf07-a5f311f15618, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.567744] env[62204]: DEBUG nova.scheduler.client.report [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1063.583659] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200426, 'name': ReconfigVM_Task, 'duration_secs': 0.13043} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.584065] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260131', 'volume_id': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'name': 'volume-e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'a93880fc-e517-4d83-98c1-9ce2405bf9d5', 'attached_at': '2024-10-08T23:45:26.000000', 'detached_at': '', 'volume_id': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5', 'serial': 'e2243127-d9d5-46bb-a37c-1bd28401ffa5'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1063.584431] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1063.585289] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea7b969-f4b5-4fca-b30b-14112a7f291e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.592823] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Unregistering the VM {{(pid=62204) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1063.593140] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df79a77d-9f36-4897-bd92-84cd3684803e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.698664] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1063.698807] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1063.698976] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleting the datastore file [datastore2] a93880fc-e517-4d83-98c1-9ce2405bf9d5 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1063.699612] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac6537b3-e610-4fa4-816e-c3e801c3d54a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.704552] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200427, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.40228} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.705101] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 27ecfd31-6c25-436b-a2fa-27a40f1b0f36/27ecfd31-6c25-436b-a2fa-27a40f1b0f36.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1063.705332] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1063.705569] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-895649c7-cc6f-41a0-b0e8-9995c6c6d5ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.708548] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1063.708548] env[62204]: value = "task-1200429" [ 1063.708548] env[62204]: _type = "Task" [ 1063.708548] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.712324] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1063.712324] env[62204]: value = "task-1200430" [ 1063.712324] env[62204]: _type = "Task" [ 1063.712324] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.717829] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200429, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.722745] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200430, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.754521] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 032bbedb-7663-45a3-b2d0-37570d38f573] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1063.884970] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1063.885252] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Processing image 7f840d5f-1f8a-4e6d-95b3-265fb5a5d126 {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.885516] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1063.885682] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.885865] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.886160] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dd2eafd-6b3e-4ed0-b565-eb99dd26208c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.893917] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.894116] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1063.894844] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7e3e390-46fc-4d01-a580-2ad5544dd82f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.899808] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1063.899808] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52587473-bdbd-8699-a312-c54e9c70eb10" [ 1063.899808] env[62204]: _type = "Task" [ 1063.899808] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.907451] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52587473-bdbd-8699-a312-c54e9c70eb10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.977767] env[62204]: DEBUG nova.network.neutron [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updated VIF entry in instance network info cache for port 59c7be21-51f9-4357-a2e4-24ec0bf0ed20. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1063.978191] env[62204]: DEBUG nova.network.neutron [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating instance_info_cache with network_info: [{"id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "address": "fa:16:3e:52:85:36", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c7be21-51", "ovs_interfaceid": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.076365] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.848s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.078692] env[62204]: DEBUG oslo_concurrency.lockutils [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.882s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1064.078932] env[62204]: DEBUG nova.objects.instance [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lazy-loading 'resources' on Instance uuid 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.101658] env[62204]: INFO nova.scheduler.client.report [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Deleted allocations for instance 5a3e46f9-ccf9-444e-89e1-6ca46c63d25c [ 1064.219860] env[62204]: DEBUG oslo_vmware.api [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203982} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.220499] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1064.220695] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1064.220869] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1064.221056] env[62204]: INFO nova.compute.manager [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Took 2.33 seconds to destroy the instance on the hypervisor. [ 1064.221309] env[62204]: DEBUG oslo.service.loopingcall [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1064.221506] env[62204]: DEBUG nova.compute.manager [-] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1064.221599] env[62204]: DEBUG nova.network.neutron [-] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1064.225625] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105981} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.226367] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1064.226856] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce1ab21-4fd9-4d52-a8a0-517fcbdc02cd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.248586] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 27ecfd31-6c25-436b-a2fa-27a40f1b0f36/27ecfd31-6c25-436b-a2fa-27a40f1b0f36.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.248850] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e9254d25-887c-413f-bdce-805184a95269 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.263048] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 1c52b662-e436-4e0c-a77b-0f2fc1041a7d] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1064.272272] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1064.272272] env[62204]: value = "task-1200431" [ 1064.272272] env[62204]: _type = "Task" [ 1064.272272] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.282349] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200431, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.410480] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Preparing fetch location {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1064.410751] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Fetch image to [datastore2] OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5/OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5.vmdk {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1064.410941] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Downloading stream optimized image 7f840d5f-1f8a-4e6d-95b3-265fb5a5d126 to [datastore2] OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5/OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5.vmdk on the data store datastore2 as vApp {{(pid=62204) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1064.411139] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Downloading image file data 7f840d5f-1f8a-4e6d-95b3-265fb5a5d126 to the ESX as VM named 'OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5' {{(pid=62204) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1064.481106] env[62204]: DEBUG oslo_concurrency.lockutils [req-05cd8c35-eec6-4f33-8a57-5de7033a3fd8 req-5621c5d2-355a-4b68-8299-18a1c89c4e94 service nova] Releasing lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1064.486207] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1064.486207] env[62204]: value = "resgroup-9" [ 1064.486207] env[62204]: _type = "ResourcePool" [ 1064.486207] env[62204]: }. 
{{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1064.486576] env[62204]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-cb726476-34cf-4402-acb3-7693f67cb6c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.507307] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lease: (returnval){ [ 1064.507307] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5248bd8e-bcb0-536b-a8b4-4b711c60e334" [ 1064.507307] env[62204]: _type = "HttpNfcLease" [ 1064.507307] env[62204]: } obtained for vApp import into resource pool (val){ [ 1064.507307] env[62204]: value = "resgroup-9" [ 1064.507307] env[62204]: _type = "ResourcePool" [ 1064.507307] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1064.508205] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the lease: (returnval){ [ 1064.508205] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5248bd8e-bcb0-536b-a8b4-4b711c60e334" [ 1064.508205] env[62204]: _type = "HttpNfcLease" [ 1064.508205] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1064.514271] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1064.514271] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5248bd8e-bcb0-536b-a8b4-4b711c60e334" [ 1064.514271] env[62204]: _type = "HttpNfcLease" [ 1064.514271] env[62204]: } is initializing. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1064.611274] env[62204]: DEBUG oslo_concurrency.lockutils [None req-010b1e6d-2061-4754-89d0-5bc47a5ee39d tempest-ServersAaction247Test-807625935 tempest-ServersAaction247Test-807625935-project-member] Lock "5a3e46f9-ccf9-444e-89e1-6ca46c63d25c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.603s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1064.695734] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba1a5e0-a47e-464f-a95a-697f5c94b559 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.705341] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa944b03-bc31-4d65-ae8c-a9e4a6861a14 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.709741] env[62204]: DEBUG nova.compute.manager [req-63a5ede9-bb18-40d3-bb40-b4582d1da428 req-d8ed5250-c56b-4211-b051-79a6d13339be service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Received event network-vif-deleted-a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1064.709826] env[62204]: INFO nova.compute.manager [req-63a5ede9-bb18-40d3-bb40-b4582d1da428 req-d8ed5250-c56b-4211-b051-79a6d13339be service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Neutron deleted interface a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee; detaching it from the instance and deleting it from the info cache [ 1064.709985] env[62204]: DEBUG nova.network.neutron [req-63a5ede9-bb18-40d3-bb40-b4582d1da428 req-d8ed5250-c56b-4211-b051-79a6d13339be service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.739329] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ceea59-f8f1-48e6-914e-88294a18c99b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.747989] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8683589d-c254-4420-9c53-e7e8ca0a32cf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.765110] env[62204]: DEBUG nova.compute.provider_tree [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.766382] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: ade509d8-5d7c-4926-bb2f-067dce84f76c] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1064.782377] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': 
task-1200431, 'name': ReconfigVM_Task, 'duration_secs': 0.340927} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.782569] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 27ecfd31-6c25-436b-a2fa-27a40f1b0f36/27ecfd31-6c25-436b-a2fa-27a40f1b0f36.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1064.783843] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec013842-2f9a-4762-82d2-1c3b783cd220 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.791544] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1064.791544] env[62204]: value = "task-1200433" [ 1064.791544] env[62204]: _type = "Task" [ 1064.791544] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.801399] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200433, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.016960] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1065.016960] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5248bd8e-bcb0-536b-a8b4-4b711c60e334" [ 1065.016960] env[62204]: _type = "HttpNfcLease" [ 1065.016960] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1065.185591] env[62204]: DEBUG nova.network.neutron [-] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.212885] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-929627b6-6f51-436e-920f-08abbba2191c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.224982] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0462a4db-12fc-4abf-a9af-04f041d5613f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.254729] env[62204]: DEBUG nova.compute.manager [req-63a5ede9-bb18-40d3-bb40-b4582d1da428 req-d8ed5250-c56b-4211-b051-79a6d13339be service nova] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Detach interface failed, port_id=a7dcd5c1-4533-48ab-bcf1-3b5a4566f9ee, reason: Instance a93880fc-e517-4d83-98c1-9ce2405bf9d5 could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1065.270046] env[62204]: DEBUG nova.scheduler.client.report [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1065.272906] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: e42444b3-51c9-4d0f-9eee-c6f2e6631997] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1065.304111] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200433, 'name': Rename_Task, 'duration_secs': 0.159297} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.304424] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1065.304675] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43e76a78-c02d-44fb-889a-7b6758613af6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.313172] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1065.313172] env[62204]: value = "task-1200434" [ 1065.313172] env[62204]: _type = "Task" [ 1065.313172] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.322091] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200434, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.518766] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1065.518766] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5248bd8e-bcb0-536b-a8b4-4b711c60e334" [ 1065.518766] env[62204]: _type = "HttpNfcLease" [ 1065.518766] env[62204]: } is ready. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1065.518766] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1065.518766] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5248bd8e-bcb0-536b-a8b4-4b711c60e334" [ 1065.518766] env[62204]: _type = "HttpNfcLease" [ 1065.518766] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1065.519122] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fccf1e8-2c3f-4b0b-af82-d23b8c10e01b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.526568] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522444b1-9771-2ccc-0566-487ddee90a9c/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1065.526762] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating HTTP connection to write to file with size = 31662592 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522444b1-9771-2ccc-0566-487ddee90a9c/disk-0.vmdk. {{(pid=62204) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1065.591492] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a59794e7-a37a-4d2e-acb4-4bba9265fec3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.689159] env[62204]: INFO nova.compute.manager [-] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Took 1.47 seconds to deallocate network for instance. 
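The entries above trace the stream-optimized image import for instance 3258243e: an HttpNfcLease is requested for a vApp import into the resource pool, polled until it reports "ready", the VMDK URL is read from the lease info, image data is streamed to that URL while HttpNfcLeaseProgress keeps the lease alive, and the lease is completed once the image iterator is exhausted. The following is a minimal Python sketch of that poll–stream–progress–complete pattern; the session helper names (get_lease_state, read_lease_info, upload_chunk, report_progress, complete_lease) are placeholders for illustration, not the actual oslo_vmware API.

```python
# Sketch of the lease-based image import flow visible in the log:
# poll the HttpNfcLease until ready, stream the image to the VMDK URL
# it exposes, report progress periodically, then complete the lease.
# All session.* helpers below are hypothetical placeholders.
import time


def import_image_via_lease(session, lease, image_chunks, total_size,
                           poll_interval=0.5):
    # Wait for the lease to leave the "initializing" state.
    while True:
        state = session.get_lease_state(lease)        # placeholder call
        if state == 'ready':
            break
        if state == 'error':
            raise RuntimeError('HttpNfcLease entered error state')
        time.sleep(poll_interval)

    # The ready lease carries the URL of the backing disk (disk-0.vmdk).
    vmdk_url = session.read_lease_info(lease)['url']  # placeholder call

    written = 0
    for chunk in image_chunks:
        session.upload_chunk(vmdk_url, chunk)         # placeholder call
        written += len(chunk)
        # Periodic progress updates mirror the HttpNfcLeaseProgress
        # invocations in the log and keep the lease from timing out.
        session.report_progress(lease, written * 100 // total_size)

    # Once reading from the image iterator completes, release the lease
    # (HttpNfcLeaseComplete in the log).
    session.complete_lease(lease)
```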
[ 1065.776561] env[62204]: DEBUG oslo_concurrency.lockutils [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.698s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1065.781455] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: aa336eda-d55a-4560-81bf-e4fcc6f4b485] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1065.806112] env[62204]: INFO nova.scheduler.client.report [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted allocations for instance 2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06 [ 1065.825707] env[62204]: DEBUG oslo_vmware.api [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200434, 'name': PowerOnVM_Task, 'duration_secs': 0.447663} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.825954] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1065.826197] env[62204]: INFO nova.compute.manager [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Took 7.16 seconds to spawn the instance on the hypervisor. [ 1065.826387] env[62204]: DEBUG nova.compute.manager [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1065.827295] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bd1836-bfb8-4785-9fab-5a0e63942889 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.236491] env[62204]: INFO nova.compute.manager [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: a93880fc-e517-4d83-98c1-9ce2405bf9d5] Took 0.55 seconds to detach 1 volumes for instance. 
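The "compute_resources" lock messages above ("acquired by ... :: waited", "released ... :: held N.NNNs") come from oslo.concurrency's lock wrapper around the resource tracker's usage updates. A minimal sketch of that pattern, assuming oslo.concurrency is installed, is shown below; the tracker method bodies are stand-ins for illustration, not Nova's actual ResourceTracker code.

```python
# Sketch of the oslo.concurrency locking pattern that produces the
# "acquired by" / "released ... held" debug lines in the log.
from oslo_concurrency import lockutils

COMPUTE_RESOURCES_SEMAPHORE = "compute_resources"


@lockutils.synchronized(COMPUTE_RESOURCES_SEMAPHORE)
def update_usage(tracker, instance):
    # Runs with the shared in-process lock held; the decorator's wrapper
    # logs how long the caller waited for and held the lock.
    tracker.apply_usage_delta(instance)   # placeholder for the real work


def release_resources(tracker, instance):
    # The same lock can also be taken explicitly as a context manager.
    with lockutils.lock(COMPUTE_RESOURCES_SEMAPHORE):
        tracker.drop_usage(instance)      # placeholder for the real work
```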
[ 1066.284958] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 2178b629-4be6-473b-9a75-19efa234d442] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1066.314022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-91e211b7-bf09-4387-aad3-d2981fb06fd4 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "2d63b659-a8d2-41c2-9f84-4b0f4bfa3a06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.076s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.351290] env[62204]: INFO nova.compute.manager [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Took 14.03 seconds to build instance. [ 1066.710933] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Completed reading data from the image iterator. {{(pid=62204) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1066.710933] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522444b1-9771-2ccc-0566-487ddee90a9c/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1066.712178] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67356f15-7292-4f18-9b36-fabe7551d124 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.720002] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522444b1-9771-2ccc-0566-487ddee90a9c/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1066.720002] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522444b1-9771-2ccc-0566-487ddee90a9c/disk-0.vmdk. 
{{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1066.720777] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-68ad7e27-7879-4ef4-9d0c-b0a2bea72bdc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.747341] env[62204]: DEBUG oslo_concurrency.lockutils [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.747673] env[62204]: DEBUG oslo_concurrency.lockutils [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.747930] env[62204]: DEBUG nova.objects.instance [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'resources' on Instance uuid a93880fc-e517-4d83-98c1-9ce2405bf9d5 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.787607] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 0a383305-5b3b-4a7d-8834-d31e54eb4ba5] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1066.853883] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7efce0a2-4fe4-410a-b79f-b0944a305284 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.542s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1067.200965] env[62204]: DEBUG oslo_vmware.rw_handles [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522444b1-9771-2ccc-0566-487ddee90a9c/disk-0.vmdk. 
{{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1067.201173] env[62204]: INFO nova.virt.vmwareapi.images [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Downloaded image file data 7f840d5f-1f8a-4e6d-95b3-265fb5a5d126 [ 1067.202138] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a902c011-cf81-47ce-8a7c-0da345df002c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.218114] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63583a12-9ff6-4d9b-8fe6-a95eb7d152bb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.291177] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: ddef8de2-530e-4b94-aff1-6f7e410f44fb] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1067.359759] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b6c821-efd3-45db-b7ca-95595672ccc6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.367834] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd391a8c-e26d-4bce-b2e4-00bdbba74155 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.399222] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26df9a01-6a41-49bf-8136-9dcedac90b82 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.407097] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c824a8c9-b602-4f9b-84f4-06aa47d1f78f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.421446] env[62204]: DEBUG nova.compute.provider_tree [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.423827] env[62204]: INFO nova.virt.vmwareapi.images [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] The imported VM was unregistered [ 1067.426220] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Caching image {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1067.426459] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 
tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Creating directory with path [datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126 {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1067.426947] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15b28383-04a3-4468-b804-267a32f231cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.449528] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Created directory with path [datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126 {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1067.449715] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5/OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5.vmdk to [datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126.vmdk. {{(pid=62204) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1067.450045] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-679bf435-d3d9-4023-871d-f107227c39b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.459696] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1067.459696] env[62204]: value = "task-1200436" [ 1067.459696] env[62204]: _type = "Task" [ 1067.459696] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.469986] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200436, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.545255] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "0085a64c-583b-4d27-b39f-19ee32d67ab4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1067.545496] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1067.786334] env[62204]: DEBUG nova.compute.manager [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Stashing vm_state: active {{(pid=62204) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1067.795046] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 21056adb-d81e-45bd-b354-1bcb488d2ed9] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1067.928801] env[62204]: DEBUG nova.scheduler.client.report [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1067.972497] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200436, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.048481] env[62204]: DEBUG nova.compute.manager [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1068.298018] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 9cf3ca28-443f-4e06-9f04-103b5b6cddd4] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1068.309431] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.434930] env[62204]: DEBUG oslo_concurrency.lockutils [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.687s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.437959] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.129s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.457317] env[62204]: INFO nova.scheduler.client.report [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted allocations for instance a93880fc-e517-4d83-98c1-9ce2405bf9d5 [ 1068.477957] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200436, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.573623] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.802073] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 7671c77f-3da8-4a41-a472-138c7bd23a92] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1068.943452] env[62204]: INFO nova.compute.claims [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1068.970948] env[62204]: DEBUG oslo_concurrency.lockutils [None req-78d922fd-05f6-4466-83fd-be2e0e2771c8 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "a93880fc-e517-4d83-98c1-9ce2405bf9d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.085s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1068.984418] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200436, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.305133] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 4fd0c913-8344-4fb9-91ad-f8ab64c6e89a] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1069.450103] env[62204]: INFO nova.compute.resource_tracker [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating resource usage from migration a10de0f5-5897-40e4-bf08-9f11f644d8f8 [ 1069.481045] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200436, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.657028] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7039f309-7a1c-49c4-b510-e19b437d091a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.669332] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be1e3c6-15cf-4439-a906-671179866fa0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.709369] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f03f755-18cf-4ace-8cf6-abb9d38a1d25 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.724054] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c9a477-a8d2-455a-93e0-eb1a6a135f9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.742616] env[62204]: DEBUG nova.compute.provider_tree [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.809146] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 7a0e579d-38e7-4f04-bf4d-1076dfc3b374] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1069.976075] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200436, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.508778} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.976374] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5/OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5.vmdk to [datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126.vmdk. 
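The records above trace one long-running vCenter operation end to end: oslo.vmware invokes VirtualDiskManager.MoveVirtualDisk_Task, then polls the returned task (the repeated "Task: {'id': task-1200436, ...} progress is N%" lines from _poll_task) until it completes successfully with a duration_secs value. The sketch below shows, in rough outline, how that invoke-and-wait pattern looks when driven directly through oslo.vmware; the vCenter host, credentials, datastore paths, and keyword arguments are illustrative placeholders rather than values taken from this log, and the exact calls made by the Nova vmwareapi driver (e.g. ds_util.disk_move, which also passes Datacenter morefs) differ in detail.

    # Hedged sketch of the invoke-and-poll pattern visible in the surrounding
    # records ("Invoking VirtualDiskManager.MoveVirtualDisk_Task", "Waiting for
    # the task", "progress is N%", "completed successfully"). All connection
    # details and datastore paths below are placeholders.
    from oslo_vmware import api

    # Creating the session produces the SessionManager.Login /
    # RetrieveServiceContent records seen near the top of this log.
    session = api.VMwareAPISession(
        'vc.example.org',                # placeholder vCenter host
        'administrator@vsphere.local',   # placeholder username
        'secret',                        # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)          # cadence of the "_poll_task" records

    # MoveVirtualDisk_Task belongs to the VirtualDiskManager managed object;
    # invoke_api() emits the "Invoking ... with opID=oslo.vmware-..." record.
    # (The Nova driver also passes source/destination Datacenter morefs and a
    # force flag; they are omitted from this sketch.)
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'MoveVirtualDisk_Task', disk_mgr,
        sourceName='[datastore2] OSTACK_IMG_<uuid>/OSTACK_IMG_<uuid>.vmdk',
        destName='[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk')

    # wait_for_task() loops on the task state, logging the "Task: {...} progress
    # is N%" records, and returns the task info once the task succeeds
    # (raising an oslo_vmware exception if the task errors out).
    task_info = session.wait_for_task(task)
    print(task_info.state)               # 'success' for a completed move

The same pattern repeats below for DeleteDatastoreFile_Task, CopyVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOffVM_Task and PowerOnVM_Task: invoke the *_Task method, then hand the task reference to wait_for_task and let it poll until completion.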
[ 1069.976607] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Cleaning up location [datastore2] OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5 {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1069.976852] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_a3c8c9df-eb44-4788-9b9b-c40bb48276e5 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1069.977275] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7e239ae-f184-4651-b738-6faeebf0e4f4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.984971] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1069.984971] env[62204]: value = "task-1200437" [ 1069.984971] env[62204]: _type = "Task" [ 1069.984971] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.992946] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200437, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.226739] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "18cad772-c6f8-4797-8c03-86321ee62958" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.226965] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.246008] env[62204]: DEBUG nova.scheduler.client.report [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1070.313444] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 274285e5-fc23-48b4-b0d6-5a67bc764d78] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1070.495924] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200437, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123961} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.496156] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1070.496382] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1070.496649] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126.vmdk to [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe/3258243e-a9df-4b3e-a6bd-17e3b2168efe.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1070.496907] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea1fd27a-5553-47b8-bf7f-824ef2feefad {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.503802] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1070.503802] env[62204]: value = "task-1200438" [ 1070.503802] env[62204]: _type = "Task" [ 1070.503802] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.511644] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.729514] env[62204]: DEBUG nova.compute.manager [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1070.750431] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.312s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.750604] env[62204]: INFO nova.compute.manager [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Migrating [ 1070.757610] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.184s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.759104] env[62204]: INFO nova.compute.claims [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1070.816405] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: d97d792d-614f-42e3-8516-6c0a7cf15ad5] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1071.017819] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200438, 'name': CopyVirtualDisk_Task} progress is 24%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.273613] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1071.273794] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.273962] env[62204]: DEBUG nova.network.neutron [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1071.292461] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.320012] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 259ba9b5-cfc9-420b-8ab0-97ebdc36cfc1] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1071.519380] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200438, 'name': CopyVirtualDisk_Task} progress is 46%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.823626] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: f445a8ea-ff21-44e9-8389-231a03c51650] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1071.904062] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bef5dda-30ab-4559-9716-ddcc473c2709 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.915870] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5da74d-b509-4100-a1ff-1fe33f58a3c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.954017] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530ab1e3-7f31-4c7a-9701-f2977a29fe61 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.966815] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c451bf-14b8-4604-92ad-cd8f26671b6c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.987596] env[62204]: DEBUG nova.compute.provider_tree [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.021659] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200438, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.026839] env[62204]: DEBUG nova.network.neutron [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance_info_cache with network_info: [{"id": "34d2b53b-7f04-471d-b817-7fa154770046", "address": "fa:16:3e:ec:e1:09", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34d2b53b-7f", "ovs_interfaceid": "34d2b53b-7f04-471d-b817-7fa154770046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.329722] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: bd0f87d1-e53a-4433-afc6-6aea7e68d6f3] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1072.491629] env[62204]: DEBUG nova.scheduler.client.report [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1072.522556] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200438, 'name': CopyVirtualDisk_Task} progress is 91%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.529404] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1072.834160] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 7b7032a8-8093-43fb-b2e2-c6308d96e819] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1072.997452] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1072.997973] env[62204]: DEBUG nova.compute.manager [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1073.000598] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.708s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1073.002052] env[62204]: INFO nova.compute.claims [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.017375] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200438, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.185455} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.017619] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126/7f840d5f-1f8a-4e6d-95b3-265fb5a5d126.vmdk to [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe/3258243e-a9df-4b3e-a6bd-17e3b2168efe.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1073.018448] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2d7b66-7ce1-4452-a622-10c292828b5c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.045258] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe/3258243e-a9df-4b3e-a6bd-17e3b2168efe.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1073.046695] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-789ace5c-9b18-422e-a4ec-a2c028e5b4d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.069404] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1073.069404] env[62204]: value = "task-1200439" [ 1073.069404] env[62204]: _type = "Task" [ 1073.069404] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.078503] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.337618] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 211ca0c1-cf05-4148-ad5c-46cbbd72278e] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1073.506811] env[62204]: DEBUG nova.compute.utils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1073.511514] env[62204]: DEBUG nova.compute.manager [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1073.511771] env[62204]: DEBUG nova.network.neutron [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1073.553848] env[62204]: DEBUG nova.policy [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f57a0e000a79440489a0009f1b2390e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7cc2d3674b2a4fa3806dc0286481368e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1073.582214] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200439, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.809898] env[62204]: DEBUG nova.network.neutron [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Successfully created port: 371d9a0f-9978-426a-8031-38da073e9e35 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1073.841583] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 62605b48-e640-4b4d-ab77-1ed44a75daa3] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1074.012137] env[62204]: DEBUG nova.compute.manager [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1074.056069] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dc947d-c85d-4559-8f47-0dfe2b87947a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.085948] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance '27ecfd31-6c25-436b-a2fa-27a40f1b0f36' progress to 0 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1074.107652] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200439, 'name': ReconfigVM_Task, 'duration_secs': 0.747338} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.108162] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe/3258243e-a9df-4b3e-a6bd-17e3b2168efe.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1074.112971] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c93c0fe-3fde-4c69-9b8a-95779e347dff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.122109] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1074.122109] env[62204]: value = "task-1200440" [ 1074.122109] env[62204]: _type = "Task" [ 1074.122109] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.139173] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200440, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.183246] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1e7e45-825d-4665-b5f3-1b2faf9bae03 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.193758] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08fbe7a-ca56-47cf-8b54-e944781f33f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.233308] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db6bea4-ec1b-4cf1-8046-7eb4f0c3897e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.241219] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0addb000-12b4-4485-8665-26f638394820 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.256577] env[62204]: DEBUG nova.compute.provider_tree [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.345051] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 0a720922-60ea-4b31-ba56-cdcbba1ab629] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1074.600904] env[62204]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.601234] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8b1c0682-44f4-46d7-bad3-3c6e688ad9a5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.609574] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1074.609574] env[62204]: value = "task-1200441" [ 1074.609574] env[62204]: _type = "Task" [ 1074.609574] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.618466] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200441, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.631047] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200440, 'name': Rename_Task, 'duration_secs': 0.148595} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.631461] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1074.631777] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78e58806-7478-4603-860d-aa6aec4b8536 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.638811] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1074.638811] env[62204]: value = "task-1200442" [ 1074.638811] env[62204]: _type = "Task" [ 1074.638811] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.647083] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200442, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.759406] env[62204]: DEBUG nova.scheduler.client.report [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1074.848632] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 4dc4546f-85e6-4259-9ccd-a7396669eace] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1075.025428] env[62204]: DEBUG nova.compute.manager [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1075.053714] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1075.053984] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1075.054159] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1075.054465] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1075.054680] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b 
tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1075.054834] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1075.055068] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1075.055235] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1075.055406] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1075.055576] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1075.055744] env[62204]: DEBUG nova.virt.hardware [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1075.056603] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8f726b-5ffa-4204-b69c-fe59b97e7036 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.065967] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c1897a-5687-42ab-968e-5878d054de94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.120066] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200441, 'name': PowerOffVM_Task, 'duration_secs': 0.242567} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.120307] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1075.120535] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance '27ecfd31-6c25-436b-a2fa-27a40f1b0f36' progress to 17 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1075.149467] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200442, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.172343] env[62204]: DEBUG oslo_concurrency.lockutils [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.172621] env[62204]: DEBUG oslo_concurrency.lockutils [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.193842] env[62204]: DEBUG nova.compute.manager [req-ac163b8d-8c8a-437e-846d-f12947f06f1b req-76f4060b-ca36-461d-8186-f544a865d5f8 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Received event network-vif-plugged-371d9a0f-9978-426a-8031-38da073e9e35 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1075.194080] env[62204]: DEBUG oslo_concurrency.lockutils [req-ac163b8d-8c8a-437e-846d-f12947f06f1b req-76f4060b-ca36-461d-8186-f544a865d5f8 service nova] Acquiring lock "0085a64c-583b-4d27-b39f-19ee32d67ab4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1075.194390] env[62204]: DEBUG oslo_concurrency.lockutils [req-ac163b8d-8c8a-437e-846d-f12947f06f1b req-76f4060b-ca36-461d-8186-f544a865d5f8 service nova] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1075.194466] env[62204]: DEBUG oslo_concurrency.lockutils [req-ac163b8d-8c8a-437e-846d-f12947f06f1b req-76f4060b-ca36-461d-8186-f544a865d5f8 service nova] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.194631] env[62204]: DEBUG nova.compute.manager [req-ac163b8d-8c8a-437e-846d-f12947f06f1b req-76f4060b-ca36-461d-8186-f544a865d5f8 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] No waiting events found dispatching network-vif-plugged-371d9a0f-9978-426a-8031-38da073e9e35 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1075.194798] env[62204]: WARNING nova.compute.manager [req-ac163b8d-8c8a-437e-846d-f12947f06f1b req-76f4060b-ca36-461d-8186-f544a865d5f8 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Received unexpected event network-vif-plugged-371d9a0f-9978-426a-8031-38da073e9e35 for instance with vm_state building and task_state spawning. [ 1075.265124] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.264s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1075.265912] env[62204]: DEBUG nova.compute.manager [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1075.353554] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 031cb3ff-4a80-4961-a399-de31fc72e65b] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1075.628702] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1075.628965] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1075.629147] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1075.629340] env[62204]: 
DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1075.629493] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1075.629642] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1075.629851] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1075.630028] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1075.630204] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1075.630373] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1075.630551] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1075.635595] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c07a17b2-2b22-4483-a686-eb25c00dd57d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.654583] env[62204]: DEBUG oslo_vmware.api [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200442, 'name': PowerOnVM_Task, 'duration_secs': 0.686818} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.655840] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1075.657833] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1075.657833] env[62204]: value = "task-1200443" [ 1075.657833] env[62204]: _type = "Task" [ 1075.657833] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.666191] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200443, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.675287] env[62204]: DEBUG nova.compute.utils [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1075.728840] env[62204]: DEBUG nova.network.neutron [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Successfully updated port: 371d9a0f-9978-426a-8031-38da073e9e35 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1075.755708] env[62204]: DEBUG nova.compute.manager [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1075.756937] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a971bdf-2804-4fbb-b882-29eb51889982 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.773833] env[62204]: DEBUG nova.compute.utils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1075.773833] env[62204]: DEBUG nova.compute.manager [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1075.774365] env[62204]: DEBUG nova.network.neutron [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1075.784145] env[62204]: DEBUG nova.compute.manager [req-7d05ae6a-cec0-492f-90bd-07caa55661ae req-36ae9cf4-030d-447b-8e17-f1e03ead61e6 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Received event network-changed-371d9a0f-9978-426a-8031-38da073e9e35 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1075.784373] env[62204]: DEBUG nova.compute.manager [req-7d05ae6a-cec0-492f-90bd-07caa55661ae req-36ae9cf4-030d-447b-8e17-f1e03ead61e6 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Refreshing instance network info cache due to event network-changed-371d9a0f-9978-426a-8031-38da073e9e35. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1075.784580] env[62204]: DEBUG oslo_concurrency.lockutils [req-7d05ae6a-cec0-492f-90bd-07caa55661ae req-36ae9cf4-030d-447b-8e17-f1e03ead61e6 service nova] Acquiring lock "refresh_cache-0085a64c-583b-4d27-b39f-19ee32d67ab4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1075.784743] env[62204]: DEBUG oslo_concurrency.lockutils [req-7d05ae6a-cec0-492f-90bd-07caa55661ae req-36ae9cf4-030d-447b-8e17-f1e03ead61e6 service nova] Acquired lock "refresh_cache-0085a64c-583b-4d27-b39f-19ee32d67ab4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.784918] env[62204]: DEBUG nova.network.neutron [req-7d05ae6a-cec0-492f-90bd-07caa55661ae req-36ae9cf4-030d-447b-8e17-f1e03ead61e6 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Refreshing network info cache for port 371d9a0f-9978-426a-8031-38da073e9e35 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1075.819155] env[62204]: DEBUG nova.policy [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52fc19cbbaf14319a258f952c739c137', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd93f6aa3eaad4c5b91b657e75854f45f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1075.856873] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 431e7b20-22d8-4742-9c47-cdf9ee08fb32] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1076.095684] env[62204]: DEBUG nova.network.neutron [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Successfully created port: 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d 
{{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1076.168286] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200443, 'name': ReconfigVM_Task, 'duration_secs': 0.175871} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.168646] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance '27ecfd31-6c25-436b-a2fa-27a40f1b0f36' progress to 33 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1076.177817] env[62204]: DEBUG oslo_concurrency.lockutils [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.231809] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "refresh_cache-0085a64c-583b-4d27-b39f-19ee32d67ab4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1076.273754] env[62204]: DEBUG oslo_concurrency.lockutils [None req-86bafa93-8c60-489b-97d9-c33adbf138cc tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 21.308s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.277372] env[62204]: DEBUG nova.compute.manager [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1076.319716] env[62204]: DEBUG nova.network.neutron [req-7d05ae6a-cec0-492f-90bd-07caa55661ae req-36ae9cf4-030d-447b-8e17-f1e03ead61e6 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1076.360049] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 7412d7ef-b370-4253-8d57-d2bd5d06d6a9] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1076.396837] env[62204]: DEBUG nova.network.neutron [req-7d05ae6a-cec0-492f-90bd-07caa55661ae req-36ae9cf4-030d-447b-8e17-f1e03ead61e6 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.674834] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1076.675094] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1076.675301] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1076.675508] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1076.675661] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1076.675815] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1076.676036] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1076.676218] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1076.676390] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1076.676558] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1076.676733] env[62204]: DEBUG nova.virt.hardware [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1076.682222] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1076.682546] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e17b17f4-93fb-451f-a202-9c63e8c2882a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.702286] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1076.702286] env[62204]: value = "task-1200444" [ 1076.702286] env[62204]: _type = "Task" [ 1076.702286] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.711463] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200444, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.863477] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: a2a37a1b-3ef0-4be7-924c-66c7a1583b68] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1076.899242] env[62204]: DEBUG oslo_concurrency.lockutils [req-7d05ae6a-cec0-492f-90bd-07caa55661ae req-36ae9cf4-030d-447b-8e17-f1e03ead61e6 service nova] Releasing lock "refresh_cache-0085a64c-583b-4d27-b39f-19ee32d67ab4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1076.899811] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "refresh_cache-0085a64c-583b-4d27-b39f-19ee32d67ab4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.899978] env[62204]: DEBUG nova.network.neutron [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1077.213384] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200444, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.243772] env[62204]: DEBUG oslo_concurrency.lockutils [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.244044] env[62204]: DEBUG oslo_concurrency.lockutils [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.244292] env[62204]: INFO nova.compute.manager [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Attaching volume 45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f to /dev/sdb [ 1077.274198] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f27a06-e292-47ea-98e5-b3618a1dbb3a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.281453] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6359c70f-4cb3-431b-8cef-bd7147434fb1 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.286274] env[62204]: DEBUG nova.compute.manager [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1077.296318] env[62204]: DEBUG nova.virt.block_device [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Updating existing volume attachment record: cdc3d2fa-199c-43e4-b887-363bbb02c3ce {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1077.312021] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1077.312268] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1077.312441] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1077.312627] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1077.312774] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1077.312923] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1077.313145] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1077.313315] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1077.313899] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1077.313899] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1077.313899] env[62204]: DEBUG nova.virt.hardware [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1077.314666] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648b1f56-5726-470e-8c09-95aa17e83220 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.322665] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56fc6cf-1bbb-4ebb-9b03-f8fea88dcfbe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.367180] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 2b728904-19ef-4773-9260-c615da522801] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1077.573490] env[62204]: DEBUG nova.network.neutron [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Successfully updated port: 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1077.634211] env[62204]: DEBUG nova.network.neutron [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1077.715798] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200444, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.765757] env[62204]: DEBUG nova.network.neutron [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Updating instance_info_cache with network_info: [{"id": "371d9a0f-9978-426a-8031-38da073e9e35", "address": "fa:16:3e:3d:13:97", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap371d9a0f-99", "ovs_interfaceid": "371d9a0f-9978-426a-8031-38da073e9e35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.817340] env[62204]: DEBUG nova.compute.manager [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received event network-vif-plugged-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1077.817340] env[62204]: DEBUG oslo_concurrency.lockutils [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] Acquiring lock "18cad772-c6f8-4797-8c03-86321ee62958-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.817340] env[62204]: DEBUG oslo_concurrency.lockutils [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] Lock "18cad772-c6f8-4797-8c03-86321ee62958-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1077.817670] env[62204]: DEBUG oslo_concurrency.lockutils [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] Lock "18cad772-c6f8-4797-8c03-86321ee62958-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.817670] env[62204]: DEBUG nova.compute.manager [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] No waiting events found dispatching network-vif-plugged-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1077.817824] env[62204]: WARNING nova.compute.manager [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received unexpected event network-vif-plugged-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d for instance with vm_state building and task_state spawning. [ 1077.818053] env[62204]: DEBUG nova.compute.manager [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received event network-changed-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1077.818238] env[62204]: DEBUG nova.compute.manager [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Refreshing instance network info cache due to event network-changed-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1077.818429] env[62204]: DEBUG oslo_concurrency.lockutils [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] Acquiring lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1077.818568] env[62204]: DEBUG oslo_concurrency.lockutils [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] Acquired lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.818724] env[62204]: DEBUG nova.network.neutron [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Refreshing network info cache for port 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1077.870733] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 25563dec-7e4d-42d9-b922-0b2354b5d70e] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1078.081156] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.214073] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200444, 'name': ReconfigVM_Task, 'duration_secs': 1.235261} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.214400] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1078.215216] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9f4228-07ae-4e4f-8e57-4f2530cc0779 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.237566] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 27ecfd31-6c25-436b-a2fa-27a40f1b0f36/27ecfd31-6c25-436b-a2fa-27a40f1b0f36.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1078.237883] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ecc9790-018f-4173-9b74-495c6ca939c6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.258062] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1078.258062] env[62204]: value = "task-1200446" [ 1078.258062] env[62204]: _type = "Task" [ 1078.258062] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.266447] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200446, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.267999] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "refresh_cache-0085a64c-583b-4d27-b39f-19ee32d67ab4" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.268329] env[62204]: DEBUG nova.compute.manager [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Instance network_info: |[{"id": "371d9a0f-9978-426a-8031-38da073e9e35", "address": "fa:16:3e:3d:13:97", "network": {"id": "f408e7f4-3d71-4d51-a585-e02a7690282e", "bridge": "br-int", "label": "tempest-ServersTestJSON-1969679187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7cc2d3674b2a4fa3806dc0286481368e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap371d9a0f-99", "ovs_interfaceid": "371d9a0f-9978-426a-8031-38da073e9e35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1078.268720] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:13:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '371d9a0f-9978-426a-8031-38da073e9e35', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1078.275826] env[62204]: DEBUG oslo.service.loopingcall [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1078.276045] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1078.276277] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e3762ecc-4f08-4eb2-b08e-d3d206235494 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.297495] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1078.297495] env[62204]: value = "task-1200447" [ 1078.297495] env[62204]: _type = "Task" [ 1078.297495] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.305937] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200447, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.352201] env[62204]: DEBUG nova.network.neutron [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1078.374227] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 6dc170a4-b08e-44b5-a152-832670e6866b] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1078.435708] env[62204]: DEBUG nova.network.neutron [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.768649] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200446, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.807052] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200447, 'name': CreateVM_Task, 'duration_secs': 0.313478} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.807248] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1078.807946] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1078.808134] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.808485] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1078.808733] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9e50af8-c1b2-41ae-956a-5de1666cc1f4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.813426] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1078.813426] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5207e146-197c-4610-0ed2-781b8f1b9522" [ 1078.813426] env[62204]: _type = "Task" [ 1078.813426] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.823861] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5207e146-197c-4610-0ed2-781b8f1b9522, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.878162] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: f5f0c15f-ae0d-4615-93ab-3203a5d7e090] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1078.938714] env[62204]: DEBUG oslo_concurrency.lockutils [req-19c12042-d061-4417-baf9-59df3975a643 req-55964c11-c0d3-40f9-8779-c5eee84f84da service nova] Releasing lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1078.940066] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.940066] env[62204]: DEBUG nova.network.neutron [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1079.269399] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200446, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.324182] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5207e146-197c-4610-0ed2-781b8f1b9522, 'name': SearchDatastore_Task, 'duration_secs': 0.010523} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.324492] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1079.324735] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1079.324967] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1079.325138] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.326089] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1079.326089] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-150aa67b-890b-4834-9f4e-9ee18166c2d2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.334041] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1079.334279] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1079.334896] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8f3a53f-9981-4d9c-838c-ec632a732772 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.340409] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1079.340409] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dfffb9-313f-f568-0cfc-e239f3b730f7" [ 1079.340409] env[62204]: _type = "Task" [ 1079.340409] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.347693] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dfffb9-313f-f568-0cfc-e239f3b730f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.381307] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 60eaec9c-5dcc-4e2f-9649-78acba318a6b] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1079.470552] env[62204]: DEBUG nova.network.neutron [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1079.600035] env[62204]: DEBUG nova.network.neutron [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating instance_info_cache with network_info: [{"id": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "address": "fa:16:3e:8d:2c:29", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68464bf7-61", "ovs_interfaceid": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.769476] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200446, 'name': ReconfigVM_Task, 'duration_secs': 1.329885} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.769860] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 27ecfd31-6c25-436b-a2fa-27a40f1b0f36/27ecfd31-6c25-436b-a2fa-27a40f1b0f36.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1079.770184] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance '27ecfd31-6c25-436b-a2fa-27a40f1b0f36' progress to 50 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1079.851420] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52dfffb9-313f-f568-0cfc-e239f3b730f7, 'name': SearchDatastore_Task, 'duration_secs': 0.009006} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.852200] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2397a082-5f97-4641-9912-c7d13c866ddd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.858164] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1079.858164] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52efdd50-0bb1-c943-2740-9e8d86d0d719" [ 1079.858164] env[62204]: _type = "Task" [ 1079.858164] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.865946] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52efdd50-0bb1-c943-2740-9e8d86d0d719, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.885208] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 57e14d47-1d3f-4fed-93c1-11cfc17dc9bc] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.102947] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.103306] env[62204]: DEBUG nova.compute.manager [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Instance network_info: |[{"id": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "address": "fa:16:3e:8d:2c:29", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68464bf7-61", "ovs_interfaceid": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1080.103761] 
env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:2c:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68464bf7-61a8-4b7f-bbd7-a546e9e3f17d', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.111619] env[62204]: DEBUG oslo.service.loopingcall [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1080.111886] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1080.112091] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ed4896a-f9f9-4af3-8604-2f33407e3432 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.132961] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.132961] env[62204]: value = "task-1200449" [ 1080.132961] env[62204]: _type = "Task" [ 1080.132961] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.140786] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200449, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.279293] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcaa4635-f082-4feb-bff5-f4a31cbe69c3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.300516] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1374366-5c28-4def-9b2d-8d887670d800 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.320733] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance '27ecfd31-6c25-436b-a2fa-27a40f1b0f36' progress to 67 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1080.369181] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52efdd50-0bb1-c943-2740-9e8d86d0d719, 'name': SearchDatastore_Task, 'duration_secs': 0.009599} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.369485] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1080.369750] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 0085a64c-583b-4d27-b39f-19ee32d67ab4/0085a64c-583b-4d27-b39f-19ee32d67ab4.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1080.370027] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd65106b-2d9e-42bd-9820-ace34031904d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.377462] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1080.377462] env[62204]: value = "task-1200450" [ 1080.377462] env[62204]: _type = "Task" [ 1080.377462] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.385958] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200450, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.388647] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 1a1cb81f-383e-48de-8c11-3d5e2c801f40] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.644644] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200449, 'name': CreateVM_Task, 'duration_secs': 0.321184} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.644829] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1080.645600] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1080.645766] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.646128] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1080.646402] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10c037d2-bebb-4e42-93f2-06e74a50b244 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.651912] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1080.651912] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bcc771-4ee6-f2e4-1700-e3d2910a6cae" [ 1080.651912] env[62204]: _type = "Task" [ 1080.651912] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.660817] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bcc771-4ee6-f2e4-1700-e3d2910a6cae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.861919] env[62204]: DEBUG nova.network.neutron [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Port 34d2b53b-7f04-471d-b817-7fa154770046 binding to destination host cpu-1 is already ACTIVE {{(pid=62204) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1080.889102] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200450, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44159} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.889468] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 0085a64c-583b-4d27-b39f-19ee32d67ab4/0085a64c-583b-4d27-b39f-19ee32d67ab4.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1080.889563] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1080.889812] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-547b9b5b-c85a-4ba1-b293-f7b76461f69a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.891860] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 1121b1b8-127e-475f-8dfc-de43911de39a] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1080.898914] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1080.898914] env[62204]: value = "task-1200451" [ 1080.898914] env[62204]: _type = "Task" [ 1080.898914] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.907844] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200451, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.163459] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52bcc771-4ee6-f2e4-1700-e3d2910a6cae, 'name': SearchDatastore_Task, 'duration_secs': 0.044007} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.163737] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1081.163980] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1081.164263] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1081.164417] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.164606] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1081.164894] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c208fc71-8b12-466d-b861-c1ea0ace4081 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.180728] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1081.180910] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1081.181653] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9836bd23-ef1b-43a5-b908-f7a6a8488f21 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.187184] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1081.187184] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52777074-fd83-5735-0b1a-ee0164fcc675" [ 1081.187184] env[62204]: _type = "Task" [ 1081.187184] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.194359] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52777074-fd83-5735-0b1a-ee0164fcc675, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.394586] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 67ee5c4d-3825-4580-a26e-74eb8da50883] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1081.409486] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.345456} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.410491] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1081.411272] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b733b47-abcd-4d4f-8d53-9564f8b47a1c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.435405] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 0085a64c-583b-4d27-b39f-19ee32d67ab4/0085a64c-583b-4d27-b39f-19ee32d67ab4.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1081.435849] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bedc039-a942-44df-b139-b86babb42f51 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.455255] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1081.455255] env[62204]: value = "task-1200452" [ 1081.455255] env[62204]: _type = "Task" [ 1081.455255] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.463567] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200452, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.698164] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52777074-fd83-5735-0b1a-ee0164fcc675, 'name': SearchDatastore_Task, 'duration_secs': 0.03335} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.698926] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4abb2d42-c1aa-4524-aec6-72acf152c355 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.704506] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1081.704506] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a9bdca-53bf-c71d-9e44-e990bbc07f27" [ 1081.704506] env[62204]: _type = "Task" [ 1081.704506] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.712468] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a9bdca-53bf-c71d-9e44-e990bbc07f27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.841842] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Volume attach. Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1081.842205] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260146', 'volume_id': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'name': 'volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba', 'attached_at': '', 'detached_at': '', 'volume_id': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'serial': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1081.843208] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d43355c-c0a2-4f35-bcd2-83b12a2f4b47 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.861706] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e5fa99-2992-4ebe-ab6c-a516d67c436d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.895508] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f/volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1081.898407] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77db1109-5f09-4a66-bd9a-eee5fbe2d04e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.910993] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 2c393123-87de-460a-965d-43473478a79f] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1081.916463] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d 
tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.916698] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.916878] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.925681] env[62204]: DEBUG oslo_vmware.api [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1081.925681] env[62204]: value = "task-1200453" [ 1081.925681] env[62204]: _type = "Task" [ 1081.925681] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.938942] env[62204]: DEBUG oslo_vmware.api [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200453, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.965827] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200452, 'name': ReconfigVM_Task, 'duration_secs': 0.275329} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.966094] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 0085a64c-583b-4d27-b39f-19ee32d67ab4/0085a64c-583b-4d27-b39f-19ee32d67ab4.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.966715] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f69e2d1b-9a6f-4a83-b7ee-bc1cd1e6ad2f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.972712] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1081.972712] env[62204]: value = "task-1200454" [ 1081.972712] env[62204]: _type = "Task" [ 1081.972712] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.983158] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200454, 'name': Rename_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.215553] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52a9bdca-53bf-c71d-9e44-e990bbc07f27, 'name': SearchDatastore_Task, 'duration_secs': 0.037566} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.215819] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1082.216108] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958/18cad772-c6f8-4797-8c03-86321ee62958.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1082.216342] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b256cb82-f2fe-48d3-8fe5-059b56e40c8f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.223256] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1082.223256] env[62204]: value = "task-1200455" [ 1082.223256] env[62204]: _type = "Task" [ 1082.223256] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.230947] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200455, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.417915] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: dba1edda-edfd-4a97-ab95-48f3f5a933f8] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1082.440498] env[62204]: DEBUG oslo_vmware.api [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200453, 'name': ReconfigVM_Task, 'duration_secs': 0.371867} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.441072] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Reconfigured VM instance instance-00000069 to attach disk [datastore1] volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f/volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1082.449959] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12763be0-c070-4d4a-b602-3e8f38f5e204 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.475763] env[62204]: DEBUG oslo_vmware.api [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1082.475763] env[62204]: value = "task-1200456" [ 1082.475763] env[62204]: _type = "Task" [ 1082.475763] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.495755] env[62204]: DEBUG oslo_vmware.api [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200456, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.501212] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200454, 'name': Rename_Task, 'duration_secs': 0.130929} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.501806] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1082.502281] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d3dd75c-e954-43fc-ae60-9be29e7ab491 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.511539] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1082.511539] env[62204]: value = "task-1200457" [ 1082.511539] env[62204]: _type = "Task" [ 1082.511539] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.520704] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200457, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.734996] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200455, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.923066] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: cce823b9-6a03-4902-9794-2b93f99eef94] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1082.956793] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1082.957011] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.957251] env[62204]: DEBUG nova.network.neutron [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1082.991936] env[62204]: DEBUG oslo_vmware.api [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200456, 'name': ReconfigVM_Task, 'duration_secs': 0.317396} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.992244] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260146', 'volume_id': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'name': 'volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba', 'attached_at': '', 'detached_at': '', 'volume_id': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'serial': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1083.021758] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200457, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.234802] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200455, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534605} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.235101] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958/18cad772-c6f8-4797-8c03-86321ee62958.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1083.235346] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1083.235619] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3dc1c411-e096-4492-a910-55713daf807d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.243235] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1083.243235] env[62204]: value = "task-1200458" [ 1083.243235] env[62204]: _type = "Task" [ 1083.243235] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.252675] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200458, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.429398] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: c0990e53-70c9-4536-b26a-bc00bd457c56] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1083.523689] env[62204]: DEBUG oslo_vmware.api [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200457, 'name': PowerOnVM_Task, 'duration_secs': 0.602928} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.523984] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1083.524350] env[62204]: INFO nova.compute.manager [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Took 8.50 seconds to spawn the instance on the hypervisor. [ 1083.524402] env[62204]: DEBUG nova.compute.manager [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1083.525331] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c01ab30-1b64-48c7-83a1-53f38b0556b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.700060] env[62204]: DEBUG nova.network.neutron [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance_info_cache with network_info: [{"id": "34d2b53b-7f04-471d-b817-7fa154770046", "address": "fa:16:3e:ec:e1:09", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34d2b53b-7f", "ovs_interfaceid": "34d2b53b-7f04-471d-b817-7fa154770046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.753785] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200458, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068495} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.754045] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1083.754816] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a090b4-a93d-4514-9142-6be08569031e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.777114] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958/18cad772-c6f8-4797-8c03-86321ee62958.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1083.777693] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1067ccec-5146-4545-a026-ecc34ae3beeb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.797645] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1083.797645] env[62204]: value = "task-1200459" [ 1083.797645] env[62204]: _type = "Task" [ 1083.797645] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.806062] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200459, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.935580] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: d6370e37-6f73-4334-8057-a30aa2c39682] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1084.031978] env[62204]: DEBUG nova.objects.instance [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'flavor' on Instance uuid c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.047204] env[62204]: INFO nova.compute.manager [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Took 15.49 seconds to build instance. 
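Every operation recorded above follows the same pattern: a vCenter call returns a Task object (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), and wait_for_task/_poll_task then logs "progress is N%" until the task is "completed successfully" or fails. The stdlib-only Python sketch below illustrates that polling loop in spirit only; TaskInfo, fetch_task_info, and the poll interval are assumptions invented for this example and are not the oslo.vmware API.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str                  # "running", "success" or "error"
        progress: int = 0           # 0-100, mirrored by the "progress is N%" lines
        error: str | None = None

    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        """Poll fetch_task_info() until the task succeeds, fails or times out."""
        deadline = time.monotonic() + timeout
        while True:
            info = fetch_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"task failed: {info.error}")
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in time")
            print(f"progress is {info.progress}%")   # cf. the _poll_task debug entries
            time.sleep(poll_interval)

    # Example: a fake task that reports progress twice and then succeeds.
    _states = iter([TaskInfo("running", 0), TaskInfo("running", 66), TaskInfo("success", 100)])
    result = wait_for_task(lambda: next(_states), poll_interval=0.01)

The real driver keeps the task server-side and only polls its state, which is why each step above appears as a "Waiting for the task" entry followed by one or more progress entries and a final "completed successfully" with a duration.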
[ 1084.202567] env[62204]: DEBUG oslo_concurrency.lockutils [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1084.309475] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200459, 'name': ReconfigVM_Task, 'duration_secs': 0.301115} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.309706] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958/18cad772-c6f8-4797-8c03-86321ee62958.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1084.310372] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c3ceb25-05e1-4799-ac10-425544bed853 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.318459] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1084.318459] env[62204]: value = "task-1200460" [ 1084.318459] env[62204]: _type = "Task" [ 1084.318459] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.327289] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200460, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.440169] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 137ce499-6602-46b5-b1eb-b03282c2bab3] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1084.537754] env[62204]: DEBUG oslo_concurrency.lockutils [None req-625c633f-4922-4b4a-a5dd-119e43d5857a tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.293s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.550101] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c9d13622-baf6-43be-ac22-dab97df1ec8b tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.004s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.723551] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.723766] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.727862] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd1317a-faf6-422a-bb5f-cc1ac384db2d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.749829] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885f4dff-49a5-4b43-a61d-7ebe5abc0bda {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.757832] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance '27ecfd31-6c25-436b-a2fa-27a40f1b0f36' progress to 83 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1084.828846] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200460, 'name': Rename_Task, 'duration_secs': 0.142583} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.829098] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1084.829353] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b30fad6-b497-4eaa-9649-fee9a4f28347 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.836881] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1084.836881] env[62204]: value = "task-1200461" [ 1084.836881] env[62204]: _type = "Task" [ 1084.836881] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.844572] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200461, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.943552] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: ba0a4ac7-c2db-4c21-b44c-a37e1dc1e702] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1085.226459] env[62204]: INFO nova.compute.manager [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Detaching volume 45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f [ 1085.257296] env[62204]: INFO nova.virt.block_device [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Attempting to driver detach volume 45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f from mountpoint /dev/sdb [ 1085.257580] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Volume detach. 
Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1085.257779] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260146', 'volume_id': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'name': 'volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba', 'attached_at': '', 'detached_at': '', 'volume_id': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'serial': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1085.258718] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f44530-6bfd-4fcd-8198-144d0c5183f9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.263430] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.263681] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a32fd8a-20a7-45a8-bd07-de7e57ceeb80 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.281769] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec63989-5f83-40b8-aae0-3e4d0a57626f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.287015] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1085.287015] env[62204]: value = "task-1200462" [ 1085.287015] env[62204]: _type = "Task" [ 1085.287015] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.292510] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdfb211-44a8-4056-b33f-14a4ba29fc84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.297368] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200462, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.320456] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47986384-18de-46af-ba21-1eaa3217532c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.336738] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] The volume has not been displaced from its original location: [datastore1] volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f/volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f.vmdk. No consolidation needed. {{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1085.341776] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1085.342157] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43138b1e-9099-4f6e-9f6f-8a14b5ea9473 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.363143] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200461, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.364656] env[62204]: DEBUG oslo_vmware.api [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1085.364656] env[62204]: value = "task-1200463" [ 1085.364656] env[62204]: _type = "Task" [ 1085.364656] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.375146] env[62204]: DEBUG oslo_vmware.api [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200463, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.449957] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 703bf0c4-9bff-4967-8e84-09969b32b5a1] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1085.462069] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "0085a64c-583b-4d27-b39f-19ee32d67ab4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.462069] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.462290] env[62204]: DEBUG nova.compute.manager [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1085.463100] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a20e64-439e-4745-a02b-44d7d30bf53b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.470789] env[62204]: DEBUG nova.compute.manager [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62204) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1085.471432] env[62204]: DEBUG nova.objects.instance [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lazy-loading 'flavor' on Instance uuid 0085a64c-583b-4d27-b39f-19ee32d67ab4 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.798436] env[62204]: DEBUG oslo_vmware.api [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200462, 'name': PowerOnVM_Task, 'duration_secs': 0.385497} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.798716] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.798906] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-840b00ed-65cc-401d-a039-21bc46a9c22d tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance '27ecfd31-6c25-436b-a2fa-27a40f1b0f36' progress to 100 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1085.854014] env[62204]: DEBUG oslo_vmware.api [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200461, 'name': PowerOnVM_Task, 'duration_secs': 0.849514} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.854300] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.854508] env[62204]: INFO nova.compute.manager [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Took 8.57 seconds to spawn the instance on the hypervisor. [ 1085.854688] env[62204]: DEBUG nova.compute.manager [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1085.855476] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a390af8d-4570-415c-aa5b-8a02339911d0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.873782] env[62204]: DEBUG oslo_vmware.api [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200463, 'name': ReconfigVM_Task, 'duration_secs': 0.226674} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.874060] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1085.879019] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b71ea702-045d-47df-8419-ebdbef579cce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.897858] env[62204]: DEBUG oslo_vmware.api [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1085.897858] env[62204]: value = "task-1200464" [ 1085.897858] env[62204]: _type = "Task" [ 1085.897858] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.907329] env[62204]: DEBUG oslo_vmware.api [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200464, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.953897] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 4793e9fd-be87-4885-8f0e-1fcef6ce4d2f] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1085.976342] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1085.976644] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f31c380-e1ae-4b2f-a406-7613af6ba20c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.985528] env[62204]: DEBUG oslo_vmware.api [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1085.985528] env[62204]: value = "task-1200465" [ 1085.985528] env[62204]: _type = "Task" [ 1085.985528] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.996156] env[62204]: DEBUG oslo_vmware.api [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200465, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.377961] env[62204]: INFO nova.compute.manager [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Took 15.14 seconds to build instance. [ 1086.407847] env[62204]: DEBUG oslo_vmware.api [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200464, 'name': ReconfigVM_Task, 'duration_secs': 0.244788} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.408185] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260146', 'volume_id': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'name': 'volume-45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba', 'attached_at': '', 'detached_at': '', 'volume_id': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f', 'serial': '45eba9b7-5a07-469e-8ca3-7aa1c7dbcf6f'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1086.456827] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 55d1649c-5eff-4264-bce1-dd907f9531f2] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1086.496412] env[62204]: DEBUG oslo_vmware.api [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200465, 'name': PowerOffVM_Task, 'duration_secs': 0.256502} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.496671] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1086.496851] env[62204]: DEBUG nova.compute.manager [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1086.497682] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239c8508-f2f9-4e55-ba6c-2d24bf543669 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.597204] env[62204]: DEBUG nova.compute.manager [req-b3ea6895-f434-4716-83a3-ecba9e6c4afa req-6d219ca5-3a9e-4895-9eb0-e205976293b0 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received event network-changed-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1086.597428] env[62204]: DEBUG nova.compute.manager [req-b3ea6895-f434-4716-83a3-ecba9e6c4afa req-6d219ca5-3a9e-4895-9eb0-e205976293b0 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Refreshing instance network info cache due to event network-changed-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1086.597720] env[62204]: DEBUG oslo_concurrency.lockutils [req-b3ea6895-f434-4716-83a3-ecba9e6c4afa req-6d219ca5-3a9e-4895-9eb0-e205976293b0 service nova] Acquiring lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1086.597792] env[62204]: DEBUG oslo_concurrency.lockutils [req-b3ea6895-f434-4716-83a3-ecba9e6c4afa req-6d219ca5-3a9e-4895-9eb0-e205976293b0 service nova] Acquired lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.597960] env[62204]: DEBUG nova.network.neutron [req-b3ea6895-f434-4716-83a3-ecba9e6c4afa req-6d219ca5-3a9e-4895-9eb0-e205976293b0 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Refreshing network info cache for port 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1086.880612] env[62204]: DEBUG oslo_concurrency.lockutils [None req-929e48d3-865f-482e-b0e3-6cdae6aef496 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.653s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1086.952150] env[62204]: DEBUG nova.objects.instance [None req-31862250-8287-4977-9f6a-258b23d6d159 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'flavor' on Instance uuid 
c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.959736] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 2727dc46-98ed-435d-89ef-41bc20cda776] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1087.009433] env[62204]: DEBUG oslo_concurrency.lockutils [None req-6fe6de44-8187-4d83-a0c7-2c5b86598481 tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.547s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.354849] env[62204]: DEBUG nova.network.neutron [req-b3ea6895-f434-4716-83a3-ecba9e6c4afa req-6d219ca5-3a9e-4895-9eb0-e205976293b0 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updated VIF entry in instance network info cache for port 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1087.355289] env[62204]: DEBUG nova.network.neutron [req-b3ea6895-f434-4716-83a3-ecba9e6c4afa req-6d219ca5-3a9e-4895-9eb0-e205976293b0 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating instance_info_cache with network_info: [{"id": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "address": "fa:16:3e:8d:2c:29", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68464bf7-61", "ovs_interfaceid": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.462827] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: eca3ea4c-e212-4c3d-8dbe-a02c0d7dc948] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1087.857850] env[62204]: DEBUG oslo_concurrency.lockutils [req-b3ea6895-f434-4716-83a3-ecba9e6c4afa req-6d219ca5-3a9e-4895-9eb0-e205976293b0 service nova] Releasing lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.959946] env[62204]: DEBUG oslo_concurrency.lockutils [None req-31862250-8287-4977-9f6a-258b23d6d159 
tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.236s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1087.964985] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 51c9e353-f2cf-41b4-b37e-1cfd5dca0518] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1088.195938] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "0085a64c-583b-4d27-b39f-19ee32d67ab4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.195938] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.195938] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "0085a64c-583b-4d27-b39f-19ee32d67ab4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.195938] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.195938] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.195938] env[62204]: INFO nova.compute.manager [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Terminating instance [ 1088.195938] env[62204]: DEBUG nova.compute.manager [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1088.195938] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1088.195938] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7a467c-9911-44c1-9a59-4cdd39195874 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.204166] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1088.204452] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ff94bd0-f7aa-4aae-901f-904b7f111271 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.275583] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1088.275826] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1088.276071] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleting the datastore file [datastore1] 0085a64c-583b-4d27-b39f-19ee32d67ab4 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1088.276296] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bb5775e-b79a-46ca-b488-99fc804e6f56 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.283011] env[62204]: DEBUG oslo_vmware.api [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1088.283011] env[62204]: value = "task-1200467" [ 1088.283011] env[62204]: _type = "Task" [ 1088.283011] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.291915] env[62204]: DEBUG oslo_vmware.api [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200467, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.468408] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 69604167-6a61-4723-bf7d-7ba168837839] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1088.741385] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.741699] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.741950] env[62204]: DEBUG nova.compute.manager [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Going to confirm migration 6 {{(pid=62204) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1088.793667] env[62204]: DEBUG oslo_vmware.api [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136361} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.794090] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1088.794685] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1088.794685] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1088.794685] env[62204]: INFO nova.compute.manager [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Took 0.60 seconds to destroy the instance on the hypervisor. 
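The "Acquiring lock ... by ..." / "Lock ... acquired ... :: waited 0.000s" / "Lock ... released ... :: held N.NNNs" records that bracket operations such as do_stop_instance, do_detach_volume and do_terminate_instance in this trace are emitted by oslo.concurrency's locking helpers (lockutils.py:402/407/421 for the decorator wrapper, 310/313/331 for the context manager), which Nova uses to serialize work on a single instance. A minimal sketch of that locking pattern, with a placeholder lock name and function rather than Nova's actual code:

    from oslo_concurrency import lockutils

    INSTANCE_UUID = '0085a64c-583b-4d27-b39f-19ee32d67ab4'  # placeholder, mirrors the log

    # Decorator form: concurrent callers using the same lock name are serialized,
    # and the wrapper debug-logs how long each caller waited for and held the lock.
    @lockutils.synchronized(INSTANCE_UUID, 'nova-')
    def do_terminate_instance():
        pass  # critical section: only one caller tears the instance down at a time

    # Context-manager form (the 'Acquiring lock "refresh_cache-..."' records above
    # use this variant) for ad-hoc critical sections.
    with lockutils.lock('refresh_cache-' + INSTANCE_UUID):
        pass  # e.g. refresh the instance's network info cache

Both forms key on the lock name only, so any code path that names the same instance UUID contends on the same lock, which is exactly the waited/held accounting visible throughout this section.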
[ 1088.794972] env[62204]: DEBUG oslo.service.loopingcall [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1088.795215] env[62204]: DEBUG nova.compute.manager [-] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1088.795312] env[62204]: DEBUG nova.network.neutron [-] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1088.972311] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: a71fd192-f3b6-4f0f-900d-887d15f44d7a] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1088.982257] env[62204]: DEBUG oslo_concurrency.lockutils [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.982615] env[62204]: DEBUG oslo_concurrency.lockutils [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.982861] env[62204]: DEBUG oslo_concurrency.lockutils [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.983072] env[62204]: DEBUG oslo_concurrency.lockutils [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.983313] env[62204]: DEBUG oslo_concurrency.lockutils [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1088.985818] env[62204]: INFO nova.compute.manager [None 
req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Terminating instance [ 1088.988073] env[62204]: DEBUG nova.compute.manager [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1088.988314] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1088.989249] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda1b756-220b-4008-9d9d-8bbd15bc92d9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.000888] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.001725] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7200a9f9-6737-406a-bc3b-33ddc018163b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.012622] env[62204]: DEBUG oslo_vmware.api [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1089.012622] env[62204]: value = "task-1200468" [ 1089.012622] env[62204]: _type = "Task" [ 1089.012622] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.022014] env[62204]: DEBUG oslo_vmware.api [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200468, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.093124] env[62204]: DEBUG nova.compute.manager [req-bc226344-b658-4ad0-b0be-f2693f37007c req-eaa8779a-5983-4565-aaca-0f0cc7968458 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Received event network-vif-deleted-371d9a0f-9978-426a-8031-38da073e9e35 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1089.093621] env[62204]: INFO nova.compute.manager [req-bc226344-b658-4ad0-b0be-f2693f37007c req-eaa8779a-5983-4565-aaca-0f0cc7968458 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Neutron deleted interface 371d9a0f-9978-426a-8031-38da073e9e35; detaching it from the instance and deleting it from the info cache [ 1089.093621] env[62204]: DEBUG nova.network.neutron [req-bc226344-b658-4ad0-b0be-f2693f37007c req-eaa8779a-5983-4565-aaca-0f0cc7968458 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.249862] env[62204]: DEBUG nova.compute.manager [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Stashing vm_state: active {{(pid=62204) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 1089.295398] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1089.295608] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquired lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.295794] env[62204]: DEBUG nova.network.neutron [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1089.295981] env[62204]: DEBUG nova.objects.instance [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lazy-loading 'info_cache' on Instance uuid 27ecfd31-6c25-436b-a2fa-27a40f1b0f36 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.475707] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 0a4a432d-a71a-4da7-be90-25dcec5a64c6] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1089.523568] env[62204]: DEBUG oslo_vmware.api [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200468, 'name': PowerOffVM_Task, 'duration_secs': 0.230154} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.523867] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.524060] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.524319] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77f1b467-1d71-4c4d-b8f2-5c9249147560 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.563898] env[62204]: DEBUG nova.network.neutron [-] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.591626] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.591880] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.592152] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleting the datastore file [datastore1] c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.592460] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95514718-6d1f-4579-9fea-11157fe55bac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.596484] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e901c000-c2d6-4eb9-97e9-d6f9ba87d14a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.600521] env[62204]: DEBUG oslo_vmware.api [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1089.600521] env[62204]: value = "task-1200470" [ 1089.600521] env[62204]: _type = "Task" [ 1089.600521] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.607792] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db24825-7ff3-4f15-b9cb-87b9074fdcaf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.621277] env[62204]: DEBUG oslo_vmware.api [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200470, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.639371] env[62204]: DEBUG nova.compute.manager [req-bc226344-b658-4ad0-b0be-f2693f37007c req-eaa8779a-5983-4565-aaca-0f0cc7968458 service nova] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Detach interface failed, port_id=371d9a0f-9978-426a-8031-38da073e9e35, reason: Instance 0085a64c-583b-4d27-b39f-19ee32d67ab4 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1089.769544] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1089.769660] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1089.978857] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 432115aa-8999-40fe-a0cb-31433575c912] Instance has had 0 of 5 cleanup attempts {{(pid=62204) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1090.066653] env[62204]: INFO nova.compute.manager [-] [instance: 0085a64c-583b-4d27-b39f-19ee32d67ab4] Took 1.27 seconds to deallocate network for instance. [ 1090.111280] env[62204]: DEBUG oslo_vmware.api [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189786} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.111554] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.111741] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.111918] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.112116] env[62204]: INFO nova.compute.manager [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1090.112362] env[62204]: DEBUG oslo.service.loopingcall [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1090.112557] env[62204]: DEBUG nova.compute.manager [-] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1090.112652] env[62204]: DEBUG nova.network.neutron [-] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1090.274639] env[62204]: INFO nova.compute.claims [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1090.484400] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.484577] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Cleaning up deleted instances with incomplete migration {{(pid=62204) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1090.551263] env[62204]: DEBUG nova.network.neutron [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance_info_cache with network_info: [{"id": "34d2b53b-7f04-471d-b817-7fa154770046", "address": "fa:16:3e:ec:e1:09", "network": {"id": "569f9264-e88a-4d94-b82a-9aaf9823803a", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1367334725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6907df6f17b142c0b4881f15f3b88a9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4105fae7-3de1-4a6c-a44b-39097c174699", "external-id": "nsx-vlan-transportzone-773", "segmentation_id": 773, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34d2b53b-7f", "ovs_interfaceid": "34d2b53b-7f04-471d-b817-7fa154770046", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.573264] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.781412] env[62204]: INFO nova.compute.resource_tracker [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 
tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating resource usage from migration f1a1a5ec-6371-49f6-ae6a-4fc07bb9e46a [ 1090.901620] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a719e59-0824-4ada-a203-076f50617d4b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.909777] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e508d0cb-1823-46e5-b4c3-b22b3eb05a61 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.940356] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95c31e3-97ee-494a-b1ac-c0499b54b52d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.948341] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668f6132-c312-4aa1-ad43-60bd41cdd5ba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.962785] env[62204]: DEBUG nova.compute.provider_tree [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.986894] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.054338] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Releasing lock "refresh_cache-27ecfd31-6c25-436b-a2fa-27a40f1b0f36" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.054521] env[62204]: DEBUG nova.objects.instance [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lazy-loading 'migration_context' on Instance uuid 27ecfd31-6c25-436b-a2fa-27a40f1b0f36 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.072049] env[62204]: DEBUG nova.network.neutron [-] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.122591] env[62204]: DEBUG nova.compute.manager [req-3072a7b5-6aac-4bff-81a8-9fa3434778ff req-74258fc2-efb2-4cfe-a1ff-fd7eab03ac84 service nova] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Received event network-vif-deleted-043fbcf5-77f4-4656-91a6-01f89dd81e08 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1091.466074] env[62204]: DEBUG nova.scheduler.client.report [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for 
provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1091.558259] env[62204]: DEBUG nova.objects.base [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Object Instance<27ecfd31-6c25-436b-a2fa-27a40f1b0f36> lazy-loaded attributes: info_cache,migration_context {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1091.559204] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f95a24d-e48f-4cb1-b9c1-dad1cc1f4d64 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.578632] env[62204]: INFO nova.compute.manager [-] [instance: c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba] Took 1.47 seconds to deallocate network for instance. [ 1091.580972] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14d0fba7-3156-455e-b614-32b55ae3bbb3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.589177] env[62204]: DEBUG oslo_vmware.api [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1091.589177] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524eaf30-e30b-4f97-84a5-5da443922c30" [ 1091.589177] env[62204]: _type = "Task" [ 1091.589177] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.597946] env[62204]: DEBUG oslo_vmware.api [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524eaf30-e30b-4f97-84a5-5da443922c30, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.971026] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.201s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.971237] env[62204]: INFO nova.compute.manager [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Migrating [ 1091.977863] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.405s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.978101] env[62204]: DEBUG nova.objects.instance [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lazy-loading 'resources' on Instance uuid 0085a64c-583b-4d27-b39f-19ee32d67ab4 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1092.087598] env[62204]: DEBUG oslo_concurrency.lockutils [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.100104] env[62204]: DEBUG oslo_vmware.api [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]524eaf30-e30b-4f97-84a5-5da443922c30, 'name': SearchDatastore_Task, 'duration_secs': 0.009585} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.100238] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.488885] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1092.489144] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.489144] env[62204]: DEBUG nova.network.neutron [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1092.596533] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0899eac-f073-48e9-9a5f-e639cf015454 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.604261] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a9e236-5856-4d5b-9fc1-8fc9d7b7411e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.634060] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6cfb1b-5376-46e8-a5f7-e092836f02b1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.641778] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d8b05e-202f-444c-89c9-0b89e2040474 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.654837] env[62204]: DEBUG nova.compute.provider_tree [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.158345] env[62204]: DEBUG nova.scheduler.client.report [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1093.219283] env[62204]: DEBUG nova.network.neutron [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [{"id": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "address": "fa:16:3e:26:e0:27", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3af8f6-e0", "ovs_interfaceid": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.662928] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.665295] env[62204]: DEBUG oslo_concurrency.lockutils [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.578s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.665548] env[62204]: DEBUG nova.objects.instance [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'resources' on Instance uuid c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.682007] env[62204]: INFO nova.scheduler.client.report [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted allocations for instance 0085a64c-583b-4d27-b39f-19ee32d67ab4 [ 1093.721944] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 
tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1094.188423] env[62204]: DEBUG oslo_concurrency.lockutils [None req-b15f0862-ee8f-4ceb-94ef-c86b9ecc94cf tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "0085a64c-583b-4d27-b39f-19ee32d67ab4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.999s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.273667] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a8b63a-2b71-42a2-96d4-6a7979be871c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.281070] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61838c0c-dbd9-49b2-98d6-5149b691350a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.311405] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b63cdb-4641-4b15-a996-5432d00c0683 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.318344] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb28026c-b3ad-4c17-8e2c-820ecae0a61d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.330951] env[62204]: DEBUG nova.compute.provider_tree [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.833949] env[62204]: DEBUG nova.scheduler.client.report [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1095.019651] env[62204]: DEBUG oslo_concurrency.lockutils [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "98805916-8501-4afb-9e1c-a5393f6e5557" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.020041] env[62204]: DEBUG oslo_concurrency.lockutils [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 
tempest-ServersTestJSON-2134578796-project-member] Lock "98805916-8501-4afb-9e1c-a5393f6e5557" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.020280] env[62204]: DEBUG oslo_concurrency.lockutils [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "98805916-8501-4afb-9e1c-a5393f6e5557-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.020478] env[62204]: DEBUG oslo_concurrency.lockutils [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "98805916-8501-4afb-9e1c-a5393f6e5557-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.020652] env[62204]: DEBUG oslo_concurrency.lockutils [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "98805916-8501-4afb-9e1c-a5393f6e5557-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.024593] env[62204]: INFO nova.compute.manager [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Terminating instance [ 1095.026445] env[62204]: DEBUG nova.compute.manager [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1095.026642] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.027537] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1e0ef8-0498-459e-a83a-1c44f5c6e7a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.035473] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.035473] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4835b01e-212c-42e8-8f1b-f40397648d31 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.041748] env[62204]: DEBUG oslo_vmware.api [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1095.041748] env[62204]: value = "task-1200471" [ 1095.041748] env[62204]: _type = "Task" [ 1095.041748] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.049358] env[62204]: DEBUG oslo_vmware.api [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200471, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.240029] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bdfb5f-bb4e-4338-b413-5cba6ca01eb9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.256410] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance '16b2b4d0-f6df-4025-b1f5-72e05c1d9abe' progress to 0 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1095.338754] env[62204]: DEBUG oslo_concurrency.lockutils [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.673s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.341242] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.241s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.356923] env[62204]: INFO nova.scheduler.client.report [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleted allocations for instance c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba [ 1095.552180] env[62204]: DEBUG oslo_vmware.api [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200471, 'name': PowerOffVM_Task, 'duration_secs': 0.218591} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.552489] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1095.552659] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1095.552916] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed4007d6-af7a-4830-9595-45b69f5aba4e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.614818] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1095.615073] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1095.615290] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleting the datastore file [datastore2] 98805916-8501-4afb-9e1c-a5393f6e5557 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1095.615551] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-adb1549b-7f79-4d1c-9bc0-bff812f2755a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.622357] env[62204]: DEBUG oslo_vmware.api [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for the task: (returnval){ [ 1095.622357] env[62204]: value = "task-1200473" [ 1095.622357] env[62204]: _type = "Task" [ 1095.622357] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.630167] env[62204]: DEBUG oslo_vmware.api [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200473, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.762682] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.763023] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4aaebcd-2e52-4325-b661-c606b8e2882c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.769070] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1095.769070] env[62204]: value = "task-1200474" [ 1095.769070] env[62204]: _type = "Task" [ 1095.769070] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.776580] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200474, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.865750] env[62204]: DEBUG oslo_concurrency.lockutils [None req-04457b72-ed0e-49a6-a03d-c32e9716c761 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "c4d4908f-e4a6-461f-8ac5-8dd2a66e37ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.883s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.947241] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acaabba7-818c-4352-b3c1-7a39156ff43d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.954493] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f43e42-7c22-47a7-946d-ef1d95b9a9ad {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.985366] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff39f8d-a733-4ace-9308-000f3a0aaf08 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.992499] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6936131f-71cc-4a09-bddf-747ee42549ee {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.005931] env[62204]: DEBUG nova.compute.provider_tree [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.132233] env[62204]: DEBUG oslo_vmware.api [None 
req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Task: {'id': task-1200473, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131988} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.132547] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1096.132749] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1096.132936] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1096.133136] env[62204]: INFO nova.compute.manager [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1096.133393] env[62204]: DEBUG oslo.service.loopingcall [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1096.133621] env[62204]: DEBUG nova.compute.manager [-] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1096.133745] env[62204]: DEBUG nova.network.neutron [-] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1096.279827] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200474, 'name': PowerOffVM_Task, 'duration_secs': 0.243384} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.280243] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.280336] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance '16b2b4d0-f6df-4025-b1f5-72e05c1d9abe' progress to 17 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1096.370704] env[62204]: DEBUG nova.compute.manager [req-80aefd9c-1fac-4cb5-807d-9245c86e58ac req-b1cb3400-ec39-4101-8f42-9ebf85da340f service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Received event network-vif-deleted-aae4d007-4d74-4c2c-9d2c-6803c004abe1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1096.370978] env[62204]: INFO nova.compute.manager [req-80aefd9c-1fac-4cb5-807d-9245c86e58ac req-b1cb3400-ec39-4101-8f42-9ebf85da340f service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Neutron deleted interface aae4d007-4d74-4c2c-9d2c-6803c004abe1; detaching it from the instance and deleting it from the info cache [ 1096.371225] env[62204]: DEBUG nova.network.neutron [req-80aefd9c-1fac-4cb5-807d-9245c86e58ac req-b1cb3400-ec39-4101-8f42-9ebf85da340f service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.508650] env[62204]: DEBUG nova.scheduler.client.report [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1096.786873] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:35Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 1096.787874] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1096.787874] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1096.787874] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1096.787874] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1096.787874] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1096.788139] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1096.788202] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1096.788370] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1096.788531] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1096.788705] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1096.793748] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89ad8963-080b-47b7-bab8-a80362834625 {{(pid=62204) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.809326] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1096.809326] env[62204]: value = "task-1200475" [ 1096.809326] env[62204]: _type = "Task" [ 1096.809326] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.818550] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200475, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.847127] env[62204]: DEBUG nova.network.neutron [-] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.874187] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58385a49-2195-445d-bc0c-798b2cbfeeaa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.883867] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1a5b6f-21e1-41aa-9dce-0ac3a995af5d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.913150] env[62204]: DEBUG nova.compute.manager [req-80aefd9c-1fac-4cb5-807d-9245c86e58ac req-b1cb3400-ec39-4101-8f42-9ebf85da340f service nova] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Detach interface failed, port_id=aae4d007-4d74-4c2c-9d2c-6803c004abe1, reason: Instance 98805916-8501-4afb-9e1c-a5393f6e5557 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1097.319226] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200475, 'name': ReconfigVM_Task, 'duration_secs': 0.149479} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.319550] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance '16b2b4d0-f6df-4025-b1f5-72e05c1d9abe' progress to 33 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1097.349460] env[62204]: INFO nova.compute.manager [-] [instance: 98805916-8501-4afb-9e1c-a5393f6e5557] Took 1.22 seconds to deallocate network for instance. 
[ 1097.518573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.177s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.826666] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1097.826978] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1097.827194] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1097.827455] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1097.827647] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1097.827837] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1097.828094] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1097.828302] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1097.828488] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1097.828657] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1097.828832] env[62204]: DEBUG nova.virt.hardware [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1097.834092] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1097.834395] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d794fb53-1465-4cdc-a9cd-88962820bc5f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.853862] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1097.853862] env[62204]: value = "task-1200476" [ 1097.853862] env[62204]: _type = "Task" [ 1097.853862] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.857080] env[62204]: DEBUG oslo_concurrency.lockutils [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.857361] env[62204]: DEBUG oslo_concurrency.lockutils [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.857580] env[62204]: DEBUG nova.objects.instance [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lazy-loading 'resources' on Instance uuid 98805916-8501-4afb-9e1c-a5393f6e5557 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.863413] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200476, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.054126] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.054394] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.071121] env[62204]: INFO nova.scheduler.client.report [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted allocation for migration a10de0f5-5897-40e4-bf08-9f11f644d8f8 [ 1098.365455] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200476, 'name': ReconfigVM_Task, 'duration_secs': 0.151483} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.365933] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1098.366924] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea235f4-793a-4dc9-b424-28c1da9df30f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.391423] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe/16b2b4d0-f6df-4025-b1f5-72e05c1d9abe.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1098.394144] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a900d39a-5d8b-4940-a623-cfb63f320c2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.412727] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1098.412727] env[62204]: value = "task-1200477" [ 1098.412727] env[62204]: _type = "Task" [ 1098.412727] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.421975] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200477, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.483256] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411d8319-bc34-4de7-89d3-e95e7eb84b84 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.490793] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1723e0ec-8386-4129-9475-a97eea1634c7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.522318] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803f543e-51fb-4e72-b3ad-9d481752af66 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.529612] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89b5cf4-36e5-4165-8ca8-cde3b7e0cc27 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.542315] env[62204]: DEBUG nova.compute.provider_tree [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.556953] env[62204]: DEBUG nova.compute.manager [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1098.576593] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.835s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.922586] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200477, 'name': ReconfigVM_Task, 'duration_secs': 0.257402} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.922910] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe/16b2b4d0-f6df-4025-b1f5-72e05c1d9abe.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.923224] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance '16b2b4d0-f6df-4025-b1f5-72e05c1d9abe' progress to 50 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1099.045056] env[62204]: DEBUG nova.scheduler.client.report [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1099.077994] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.434244] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1325755-41e4-4446-abe3-b3f295ace608 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.451564] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38b9212-34fc-4b8c-9cd4-ab66ff5e642a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.470513] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance '16b2b4d0-f6df-4025-b1f5-72e05c1d9abe' progress to 67 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1099.549972] env[62204]: DEBUG oslo_concurrency.lockutils [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.692s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.552332] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.474s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.553797] env[62204]: INFO nova.compute.claims [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1099.568143] env[62204]: INFO nova.scheduler.client.report [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Deleted allocations for instance 98805916-8501-4afb-9e1c-a5393f6e5557 [ 1099.769182] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.769465] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.769747] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.770009] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.770255] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.772403] env[62204]: INFO nova.compute.manager [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 
tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Terminating instance [ 1099.774347] env[62204]: DEBUG nova.compute.manager [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1099.774560] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1099.775632] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879d2a9c-0910-450f-afb6-ce9212eddf47 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.784558] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1099.785070] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c005038f-819c-4c32-89f3-89a5156f9d6f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.791086] env[62204]: DEBUG oslo_vmware.api [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1099.791086] env[62204]: value = "task-1200478" [ 1099.791086] env[62204]: _type = "Task" [ 1099.791086] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.798518] env[62204]: DEBUG oslo_vmware.api [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200478, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.031779] env[62204]: DEBUG nova.network.neutron [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Port 9d3af8f6-e075-441b-b191-6617ea2a18a4 binding to destination host cpu-1 is already ACTIVE {{(pid=62204) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1100.077074] env[62204]: DEBUG oslo_concurrency.lockutils [None req-84bb8747-4b23-44f3-b98a-311cd124572d tempest-ServersTestJSON-2134578796 tempest-ServersTestJSON-2134578796-project-member] Lock "98805916-8501-4afb-9e1c-a5393f6e5557" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.057s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.302443] env[62204]: DEBUG oslo_vmware.api [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200478, 'name': PowerOffVM_Task, 'duration_secs': 0.171828} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.302633] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1100.302810] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1100.303081] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b86ff425-bcb9-4c80-8b73-de5b92537711 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.406163] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1100.406398] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1100.406585] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleting the datastore file [datastore2] 27ecfd31-6c25-436b-a2fa-27a40f1b0f36 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1100.406870] env[62204]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-764e6b34-4bd4-4b79-8b6f-b2a8c6fdb277 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.414687] env[62204]: DEBUG oslo_vmware.api [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for the task: (returnval){ [ 1100.414687] env[62204]: value = "task-1200480" [ 1100.414687] env[62204]: _type = "Task" [ 1100.414687] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.422291] env[62204]: DEBUG oslo_vmware.api [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200480, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.686072] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de00a7c0-76ac-4ea6-aea2-06d32f26e4ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.695110] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d401f5e4-d8f5-436a-9c0f-fe5c542d8b5a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.725114] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a6af5c-8df8-4070-b1a4-5097e080cb26 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.732263] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf0af3c-cf3a-452c-a73d-a5effbc74be7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.745140] env[62204]: DEBUG nova.compute.provider_tree [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.924934] env[62204]: DEBUG oslo_vmware.api [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Task: {'id': task-1200480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151348} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.925223] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1100.925417] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1100.925602] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1100.925780] env[62204]: INFO nova.compute.manager [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1100.926037] env[62204]: DEBUG oslo.service.loopingcall [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1100.926243] env[62204]: DEBUG nova.compute.manager [-] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1100.926340] env[62204]: DEBUG nova.network.neutron [-] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1101.056497] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1101.059940] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.059940] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.248206] env[62204]: DEBUG nova.scheduler.client.report [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1101.278291] env[62204]: DEBUG nova.compute.manager [req-086d6e10-7324-4be4-aef7-8152bf31dcca req-3d8b008f-19a0-47f7-8251-aadeeea8a279 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Received event network-vif-deleted-34d2b53b-7f04-471d-b817-7fa154770046 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1101.278516] env[62204]: INFO nova.compute.manager [req-086d6e10-7324-4be4-aef7-8152bf31dcca req-3d8b008f-19a0-47f7-8251-aadeeea8a279 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Neutron deleted interface 34d2b53b-7f04-471d-b817-7fa154770046; detaching it from the instance and deleting it from the info cache [ 1101.278715] env[62204]: DEBUG nova.network.neutron [req-086d6e10-7324-4be4-aef7-8152bf31dcca req-3d8b008f-19a0-47f7-8251-aadeeea8a279 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.483926] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.484144] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.756443] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.204s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.756704] env[62204]: DEBUG nova.compute.manager [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1101.759691] env[62204]: DEBUG nova.network.neutron [-] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.781646] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11ee1eaf-102e-42fd-862b-c2a1b4c88fa4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.793302] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d225e9-d7b6-4a1b-b35a-94c8b12fb09e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.820717] env[62204]: DEBUG nova.compute.manager [req-086d6e10-7324-4be4-aef7-8152bf31dcca req-3d8b008f-19a0-47f7-8251-aadeeea8a279 service nova] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Detach interface failed, port_id=34d2b53b-7f04-471d-b817-7fa154770046, reason: Instance 27ecfd31-6c25-436b-a2fa-27a40f1b0f36 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1101.988735] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.988899] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Starting heal instance info cache {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1102.263377] env[62204]: DEBUG nova.compute.utils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1102.264809] env[62204]: INFO nova.compute.manager [-] [instance: 27ecfd31-6c25-436b-a2fa-27a40f1b0f36] Took 1.34 seconds to deallocate network for instance. [ 1102.265331] env[62204]: DEBUG nova.compute.manager [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Allocating IP information in the background. 
{{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1102.265551] env[62204]: DEBUG nova.network.neutron [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1102.312839] env[62204]: DEBUG nova.policy [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b52997d8756d4096b3dcba62f0bd14b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e70013d6da84d2b9a0719621c9f2c1a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1102.423968] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1102.424178] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.424375] env[62204]: DEBUG nova.network.neutron [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1102.585967] env[62204]: DEBUG nova.network.neutron [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Successfully created port: e991eae7-de66-4f1d-94f0-a1917eeaa5f1 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1102.766371] env[62204]: DEBUG nova.compute.manager [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1102.775419] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1102.775532] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1102.776573] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.808083] env[62204]: INFO nova.scheduler.client.report [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Deleted allocations for instance 27ecfd31-6c25-436b-a2fa-27a40f1b0f36 [ 1103.074008] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1103.074188] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquired lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.074691] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Forcefully refreshing network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1103.263878] env[62204]: DEBUG nova.network.neutron [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [{"id": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "address": "fa:16:3e:26:e0:27", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3af8f6-e0", "ovs_interfaceid": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.317494] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a1e6ce79-52f1-4890-9f61-b13195f6b900 tempest-DeleteServersTestJSON-293601872 tempest-DeleteServersTestJSON-293601872-project-member] Lock "27ecfd31-6c25-436b-a2fa-27a40f1b0f36" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.548s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.767710] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1103.775686] env[62204]: DEBUG nova.compute.manager [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1103.801306] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1103.801582] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1103.801755] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1103.801922] env[62204]: DEBUG 
nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1103.802079] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1103.802234] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1103.802446] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1103.802610] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1103.802787] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1103.803008] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1103.803206] env[62204]: DEBUG nova.virt.hardware [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1103.804063] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2800a9dd-afac-46a5-98e5-112553f2c3fa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.811951] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0bf79c-5346-4dfd-a2f2-129535d5c3f2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.980570] env[62204]: DEBUG nova.compute.manager [req-0aa8a701-e5a8-4505-89cd-7d3ebd0a372e req-f201cee0-88a0-4fac-8385-21508bc247b7 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Received event network-vif-plugged-e991eae7-de66-4f1d-94f0-a1917eeaa5f1 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1103.980816] env[62204]: DEBUG oslo_concurrency.lockutils [req-0aa8a701-e5a8-4505-89cd-7d3ebd0a372e req-f201cee0-88a0-4fac-8385-21508bc247b7 service nova] Acquiring lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1103.981042] env[62204]: DEBUG oslo_concurrency.lockutils [req-0aa8a701-e5a8-4505-89cd-7d3ebd0a372e req-f201cee0-88a0-4fac-8385-21508bc247b7 service nova] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1103.981219] env[62204]: DEBUG oslo_concurrency.lockutils [req-0aa8a701-e5a8-4505-89cd-7d3ebd0a372e req-f201cee0-88a0-4fac-8385-21508bc247b7 service nova] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1103.981391] env[62204]: DEBUG nova.compute.manager [req-0aa8a701-e5a8-4505-89cd-7d3ebd0a372e req-f201cee0-88a0-4fac-8385-21508bc247b7 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] No waiting events found dispatching network-vif-plugged-e991eae7-de66-4f1d-94f0-a1917eeaa5f1 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1103.981650] env[62204]: WARNING nova.compute.manager [req-0aa8a701-e5a8-4505-89cd-7d3ebd0a372e req-f201cee0-88a0-4fac-8385-21508bc247b7 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Received unexpected event network-vif-plugged-e991eae7-de66-4f1d-94f0-a1917eeaa5f1 for instance with vm_state building and task_state spawning. 
[ 1104.068009] env[62204]: DEBUG nova.network.neutron [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Successfully updated port: e991eae7-de66-4f1d-94f0-a1917eeaa5f1 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1104.294415] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c8c118-99ba-4013-9722-879ff5903ae8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.314610] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating instance_info_cache with network_info: [{"id": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "address": "fa:16:3e:52:85:36", "network": {"id": "27e9ef71-ed38-4d09-a2f5-f94e65383cfd", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1358621948-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8108a8f6b5e04832aab188333bad1e0e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59c7be21-51", "ovs_interfaceid": "59c7be21-51f9-4357-a2e4-24ec0bf0ed20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.318031] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41c26be-0f35-418e-93e4-1f6efa64a4e2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.323230] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance '16b2b4d0-f6df-4025-b1f5-72e05c1d9abe' progress to 83 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1104.570109] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "refresh_cache-3b803bcb-3d18-455e-b2fa-40fe074dfdbb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1104.570280] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock 
"refresh_cache-3b803bcb-3d18-455e-b2fa-40fe074dfdbb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.570439] env[62204]: DEBUG nova.network.neutron [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1104.820785] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Releasing lock "refresh_cache-3258243e-a9df-4b3e-a6bd-17e3b2168efe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1104.820785] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updated the network info_cache for instance {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1104.820785] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.821216] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62204) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1104.828979] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1104.829266] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81cc8a8a-a39c-4ddb-aad6-2c1a869c3b69 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.836327] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1104.836327] env[62204]: value = "task-1200482" [ 1104.836327] env[62204]: _type = "Task" [ 1104.836327] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.844437] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200482, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.104956] env[62204]: DEBUG nova.network.neutron [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1105.238735] env[62204]: DEBUG nova.network.neutron [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Updating instance_info_cache with network_info: [{"id": "e991eae7-de66-4f1d-94f0-a1917eeaa5f1", "address": "fa:16:3e:68:d3:aa", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape991eae7-de", "ovs_interfaceid": "e991eae7-de66-4f1d-94f0-a1917eeaa5f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.346177] env[62204]: DEBUG oslo_vmware.api [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200482, 'name': PowerOnVM_Task, 'duration_secs': 0.374967} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.346476] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.346672] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d42178df-c0bb-43e1-82ad-f8747c070f27 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance '16b2b4d0-f6df-4025-b1f5-72e05c1d9abe' progress to 100 {{(pid=62204) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1105.741665] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "refresh_cache-3b803bcb-3d18-455e-b2fa-40fe074dfdbb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1105.742017] env[62204]: DEBUG nova.compute.manager [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Instance network_info: |[{"id": "e991eae7-de66-4f1d-94f0-a1917eeaa5f1", "address": "fa:16:3e:68:d3:aa", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape991eae7-de", "ovs_interfaceid": "e991eae7-de66-4f1d-94f0-a1917eeaa5f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1105.742503] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:d3:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ead20342-9afa-435e-a22b-b4a903457712', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e991eae7-de66-4f1d-94f0-a1917eeaa5f1', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1105.750077] env[62204]: DEBUG oslo.service.loopingcall [None 
req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.750287] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1105.750544] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-131a8209-f9ad-44ce-9e8e-35bed2b0287d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.770013] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1105.770013] env[62204]: value = "task-1200483" [ 1105.770013] env[62204]: _type = "Task" [ 1105.770013] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.777190] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200483, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.008903] env[62204]: DEBUG nova.compute.manager [req-15f7416b-6824-4de2-8249-353c4a620ac4 req-d6f22a6a-c09b-4f30-9982-e58ebc388012 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Received event network-changed-e991eae7-de66-4f1d-94f0-a1917eeaa5f1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1106.009189] env[62204]: DEBUG nova.compute.manager [req-15f7416b-6824-4de2-8249-353c4a620ac4 req-d6f22a6a-c09b-4f30-9982-e58ebc388012 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Refreshing instance network info cache due to event network-changed-e991eae7-de66-4f1d-94f0-a1917eeaa5f1. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1106.009411] env[62204]: DEBUG oslo_concurrency.lockutils [req-15f7416b-6824-4de2-8249-353c4a620ac4 req-d6f22a6a-c09b-4f30-9982-e58ebc388012 service nova] Acquiring lock "refresh_cache-3b803bcb-3d18-455e-b2fa-40fe074dfdbb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.009534] env[62204]: DEBUG oslo_concurrency.lockutils [req-15f7416b-6824-4de2-8249-353c4a620ac4 req-d6f22a6a-c09b-4f30-9982-e58ebc388012 service nova] Acquired lock "refresh_cache-3b803bcb-3d18-455e-b2fa-40fe074dfdbb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.009694] env[62204]: DEBUG nova.network.neutron [req-15f7416b-6824-4de2-8249-353c4a620ac4 req-d6f22a6a-c09b-4f30-9982-e58ebc388012 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Refreshing network info cache for port e991eae7-de66-4f1d-94f0-a1917eeaa5f1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1106.281118] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200483, 'name': CreateVM_Task, 'duration_secs': 0.320463} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.281272] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1106.282067] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.282287] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.282653] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1106.282914] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6c374e2-f9a8-4de2-8b7e-7d0516a056a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.287385] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1106.287385] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e45491-ee3d-e6cb-2d0b-ec0677223f65" [ 1106.287385] env[62204]: _type = "Task" [ 1106.287385] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.294423] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e45491-ee3d-e6cb-2d0b-ec0677223f65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.693307] env[62204]: DEBUG nova.network.neutron [req-15f7416b-6824-4de2-8249-353c4a620ac4 req-d6f22a6a-c09b-4f30-9982-e58ebc388012 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Updated VIF entry in instance network info cache for port e991eae7-de66-4f1d-94f0-a1917eeaa5f1. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1106.693677] env[62204]: DEBUG nova.network.neutron [req-15f7416b-6824-4de2-8249-353c4a620ac4 req-d6f22a6a-c09b-4f30-9982-e58ebc388012 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Updating instance_info_cache with network_info: [{"id": "e991eae7-de66-4f1d-94f0-a1917eeaa5f1", "address": "fa:16:3e:68:d3:aa", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape991eae7-de", "ovs_interfaceid": "e991eae7-de66-4f1d-94f0-a1917eeaa5f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.798074] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52e45491-ee3d-e6cb-2d0b-ec0677223f65, 'name': SearchDatastore_Task, 'duration_secs': 0.010532} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.798348] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1106.798602] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1106.798835] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1106.798987] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.799182] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1106.799434] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6648185-9ef0-4f4c-93be-ad8b42b227ec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.807115] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1106.807322] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1106.807952] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24b2ca5d-0346-460f-b319-2f66b0aad202 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.812585] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1106.812585] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521c2d61-9762-15c9-7b48-a9646e83f097" [ 1106.812585] env[62204]: _type = "Task" [ 1106.812585] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.819972] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521c2d61-9762-15c9-7b48-a9646e83f097, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.196546] env[62204]: DEBUG oslo_concurrency.lockutils [req-15f7416b-6824-4de2-8249-353c4a620ac4 req-d6f22a6a-c09b-4f30-9982-e58ebc388012 service nova] Releasing lock "refresh_cache-3b803bcb-3d18-455e-b2fa-40fe074dfdbb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.323082] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]521c2d61-9762-15c9-7b48-a9646e83f097, 'name': SearchDatastore_Task, 'duration_secs': 0.008322} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.323865] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68971607-f3d1-4ea6-90ec-4df838d7b675 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.328992] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1107.328992] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5280be76-b833-c6fc-2456-80183fcfd9c4" [ 1107.328992] env[62204]: _type = "Task" [ 1107.328992] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.336381] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5280be76-b833-c6fc-2456-80183fcfd9c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.840210] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5280be76-b833-c6fc-2456-80183fcfd9c4, 'name': SearchDatastore_Task, 'duration_secs': 0.011582} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.840560] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1107.840923] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 3b803bcb-3d18-455e-b2fa-40fe074dfdbb/3b803bcb-3d18-455e-b2fa-40fe074dfdbb.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1107.841687] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b655b63a-e0f6-408e-b8de-6897318c3490 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.848639] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1107.848639] env[62204]: value = "task-1200484" [ 1107.848639] env[62204]: _type = "Task" [ 1107.848639] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.856877] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200484, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.302954] env[62204]: DEBUG nova.network.neutron [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Port 9d3af8f6-e075-441b-b191-6617ea2a18a4 binding to destination host cpu-1 is already ACTIVE {{(pid=62204) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1108.303415] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1108.303704] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.304016] env[62204]: DEBUG nova.network.neutron [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1108.359272] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200484, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.868808] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.97902} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.869013] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 3b803bcb-3d18-455e-b2fa-40fe074dfdbb/3b803bcb-3d18-455e-b2fa-40fe074dfdbb.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1108.869263] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1108.869542] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a23bc68b-2661-457d-8301-0cd14dd2bc25 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.877036] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1108.877036] env[62204]: value = "task-1200485" [ 1108.877036] env[62204]: _type = "Task" [ 1108.877036] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.894251] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200485, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.133327] env[62204]: DEBUG nova.network.neutron [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [{"id": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "address": "fa:16:3e:26:e0:27", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3af8f6-e0", "ovs_interfaceid": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.389877] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200485, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.23101} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.390259] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1109.391346] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc47078-7136-4d27-84e8-319a247dc1f5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.414022] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 3b803bcb-3d18-455e-b2fa-40fe074dfdbb/3b803bcb-3d18-455e-b2fa-40fe074dfdbb.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1109.414325] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c8edfae-7953-49d8-b733-3cbb12d153e2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.433327] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1109.433327] env[62204]: value = "task-1200486" [ 1109.433327] env[62204]: _type = "Task" [ 1109.433327] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.441245] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200486, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.636753] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1109.943053] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200486, 'name': ReconfigVM_Task, 'duration_secs': 0.268791} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.943053] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 3b803bcb-3d18-455e-b2fa-40fe074dfdbb/3b803bcb-3d18-455e-b2fa-40fe074dfdbb.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1109.943642] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9cc02d4-ff94-4e44-8813-2d3635a50e63 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.949966] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1109.949966] env[62204]: value = "task-1200487" [ 1109.949966] env[62204]: _type = "Task" [ 1109.949966] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.957910] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200487, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.141034] env[62204]: DEBUG nova.compute.manager [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62204) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:897}} [ 1110.141334] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1110.141600] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1110.459469] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200487, 'name': Rename_Task, 'duration_secs': 0.156422} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.459790] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1110.459993] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c753c787-f5d3-4d3d-8992-e4be12ca93a4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.465870] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1110.465870] env[62204]: value = "task-1200488" [ 1110.465870] env[62204]: _type = "Task" [ 1110.465870] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.472957] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200488, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.644969] env[62204]: DEBUG nova.objects.instance [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'migration_context' on Instance uuid 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.976552] env[62204]: DEBUG oslo_vmware.api [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200488, 'name': PowerOnVM_Task, 'duration_secs': 0.468578} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.976859] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1110.977106] env[62204]: INFO nova.compute.manager [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Took 7.20 seconds to spawn the instance on the hypervisor. 
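The spawn of instance 3b803bcb-3d18-455e-b2fa-40fe074dfdbb traced above is a chain of vCenter tasks (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each polled until it completes. As a rough illustration of that poll-until-done pattern, the sketch below uses oslo_service's FixedIntervalLoopingCall; fetch_task_state is a hypothetical stand-in for the PropertyCollector lookup that oslo.vmware performs internally, and this is not the driver's actual implementation.

# Minimal sketch of the task-polling pattern behind the "progress is N%" /
# "completed successfully" records above. `fetch_task_state` is a placeholder.
from oslo_service import loopingcall


def wait_for_task(fetch_task_state, poll_interval=0.5):
    """Poll a vCenter-style task until it reports 'success' or 'error'."""

    def _poll():
        state = fetch_task_state()  # e.g. 'queued', 'running', 'success', 'error'
        if state in ('success', 'error'):
            # Raising LoopingCallDone stops the timer; wait() returns retvalue.
            raise loopingcall.LoopingCallDone(retvalue=state)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()


if __name__ == '__main__':
    # Simulated task that finishes on the third poll.
    states = iter(['queued', 'running', 'success'])
    print(wait_for_task(lambda: next(states), poll_interval=0.1))

In the real driver this loop lives inside oslo_vmware.api.VMwareAPISession.wait_for_task (the wait_for_task/_poll_task frames logged above), which also emits the per-task progress messages and raises on a task error rather than returning the state.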
[ 1110.977355] env[62204]: DEBUG nova.compute.manager [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1110.978140] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c4a97f-3fc6-40b0-bd8e-170131aeae5b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.227026] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c2e3c1-d432-4fd5-91f5-6ef5e54f9c1c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.233981] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240afdd2-2823-4639-a44f-1dfe276bb49b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.264079] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2408b2a6-b847-42c5-9aee-20b86567e9c2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.270808] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cc87d3-c702-43cc-8a54-2c04dcb066a6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.283707] env[62204]: DEBUG nova.compute.provider_tree [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1111.286512] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.286738] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.286936] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1111.287148] env[62204]: DEBUG oslo_concurrency.lockutils [None 
req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1111.287330] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.289255] env[62204]: INFO nova.compute.manager [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Terminating instance [ 1111.291273] env[62204]: DEBUG nova.compute.manager [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1111.291465] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1111.292163] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6881d287-3d51-404b-b3a4-419f6e3ff92f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.299500] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.299715] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68781dac-5a65-4705-8aa1-9f65054f321f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.305568] env[62204]: DEBUG oslo_vmware.api [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1111.305568] env[62204]: value = "task-1200489" [ 1111.305568] env[62204]: _type = "Task" [ 1111.305568] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.314277] env[62204]: DEBUG oslo_vmware.api [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200489, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.496558] env[62204]: INFO nova.compute.manager [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Took 12.43 seconds to build instance. [ 1111.786613] env[62204]: DEBUG nova.scheduler.client.report [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1111.816500] env[62204]: DEBUG oslo_vmware.api [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200489, 'name': PowerOffVM_Task, 'duration_secs': 0.202596} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.816774] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1111.816946] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1111.817205] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-319f722c-c426-4932-98d6-83cfd1b1c9b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.884373] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1111.884627] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1111.884781] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleting the datastore file [datastore2] 3258243e-a9df-4b3e-a6bd-17e3b2168efe {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1111.885051] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4886c04-c81f-487a-97bb-3903be734740 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.891043] env[62204]: DEBUG oslo_vmware.api [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for the task: (returnval){ [ 1111.891043] env[62204]: value = "task-1200491" [ 1111.891043] env[62204]: _type = "Task" [ 1111.891043] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.899087] env[62204]: DEBUG oslo_vmware.api [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200491, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.998337] env[62204]: DEBUG oslo_concurrency.lockutils [None req-3f801818-8897-4204-8d65-3450a107f44c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.944s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.402905] env[62204]: DEBUG oslo_vmware.api [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Task: {'id': task-1200491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133876} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.402905] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1112.402905] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1112.402905] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1112.403204] env[62204]: INFO nova.compute.manager [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1112.404213] env[62204]: DEBUG oslo.service.loopingcall [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1112.404213] env[62204]: DEBUG nova.compute.manager [-] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1112.404213] env[62204]: DEBUG nova.network.neutron [-] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1112.714582] env[62204]: DEBUG nova.compute.manager [req-e73f81a4-162a-45cb-a786-a0a840379c63 req-1224159e-4270-46e2-96de-ae233bd11bf9 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Received event network-changed-e991eae7-de66-4f1d-94f0-a1917eeaa5f1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1112.714839] env[62204]: DEBUG nova.compute.manager [req-e73f81a4-162a-45cb-a786-a0a840379c63 req-1224159e-4270-46e2-96de-ae233bd11bf9 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Refreshing instance network info cache due to event network-changed-e991eae7-de66-4f1d-94f0-a1917eeaa5f1. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1112.715075] env[62204]: DEBUG oslo_concurrency.lockutils [req-e73f81a4-162a-45cb-a786-a0a840379c63 req-1224159e-4270-46e2-96de-ae233bd11bf9 service nova] Acquiring lock "refresh_cache-3b803bcb-3d18-455e-b2fa-40fe074dfdbb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1112.715235] env[62204]: DEBUG oslo_concurrency.lockutils [req-e73f81a4-162a-45cb-a786-a0a840379c63 req-1224159e-4270-46e2-96de-ae233bd11bf9 service nova] Acquired lock "refresh_cache-3b803bcb-3d18-455e-b2fa-40fe074dfdbb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.715395] env[62204]: DEBUG nova.network.neutron [req-e73f81a4-162a-45cb-a786-a0a840379c63 req-1224159e-4270-46e2-96de-ae233bd11bf9 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Refreshing network info cache for port e991eae7-de66-4f1d-94f0-a1917eeaa5f1 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1112.797936] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.656s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1113.009541] env[62204]: DEBUG nova.compute.manager [req-d1baded6-c2d6-41a6-b314-4e24e3e12039 req-6bc3dc32-78ad-46ed-ac10-fb4c4509d831 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Received event network-vif-deleted-59c7be21-51f9-4357-a2e4-24ec0bf0ed20 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1113.009825] env[62204]: INFO nova.compute.manager [req-d1baded6-c2d6-41a6-b314-4e24e3e12039 req-6bc3dc32-78ad-46ed-ac10-fb4c4509d831 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Neutron deleted interface 59c7be21-51f9-4357-a2e4-24ec0bf0ed20; detaching it from the instance and deleting it from the info cache [ 1113.010050] env[62204]: DEBUG nova.network.neutron [req-d1baded6-c2d6-41a6-b314-4e24e3e12039 
req-6bc3dc32-78ad-46ed-ac10-fb4c4509d831 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.454484] env[62204]: DEBUG nova.network.neutron [req-e73f81a4-162a-45cb-a786-a0a840379c63 req-1224159e-4270-46e2-96de-ae233bd11bf9 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Updated VIF entry in instance network info cache for port e991eae7-de66-4f1d-94f0-a1917eeaa5f1. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1113.454949] env[62204]: DEBUG nova.network.neutron [req-e73f81a4-162a-45cb-a786-a0a840379c63 req-1224159e-4270-46e2-96de-ae233bd11bf9 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Updating instance_info_cache with network_info: [{"id": "e991eae7-de66-4f1d-94f0-a1917eeaa5f1", "address": "fa:16:3e:68:d3:aa", "network": {"id": "152205c7-b457-4267-88d0-d580bcdaf135", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1242806114-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e70013d6da84d2b9a0719621c9f2c1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ead20342-9afa-435e-a22b-b4a903457712", "external-id": "nsx-vlan-transportzone-912", "segmentation_id": 912, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape991eae7-de", "ovs_interfaceid": "e991eae7-de66-4f1d-94f0-a1917eeaa5f1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.483768] env[62204]: DEBUG nova.network.neutron [-] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.512762] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fcfef102-b57c-4810-bdab-65ca1ed18c88 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.522309] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ebc53c-5355-4795-8af5-14dab8c4eed6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.549521] env[62204]: DEBUG nova.compute.manager [req-d1baded6-c2d6-41a6-b314-4e24e3e12039 req-6bc3dc32-78ad-46ed-ac10-fb4c4509d831 service nova] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Detach interface failed, port_id=59c7be21-51f9-4357-a2e4-24ec0bf0ed20, reason: Instance 3258243e-a9df-4b3e-a6bd-17e3b2168efe could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1113.957920] env[62204]: DEBUG oslo_concurrency.lockutils [req-e73f81a4-162a-45cb-a786-a0a840379c63 req-1224159e-4270-46e2-96de-ae233bd11bf9 service nova] Releasing lock "refresh_cache-3b803bcb-3d18-455e-b2fa-40fe074dfdbb" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1113.986707] env[62204]: INFO nova.compute.manager [-] [instance: 3258243e-a9df-4b3e-a6bd-17e3b2168efe] Took 1.58 seconds to deallocate network for instance. [ 1114.337180] env[62204]: INFO nova.compute.manager [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Swapping old allocation on dict_keys(['92e8f362-5134-40c6-9a5c-0b8f64197972']) held by migration f1a1a5ec-6371-49f6-ae6a-4fc07bb9e46a for instance [ 1114.359524] env[62204]: DEBUG nova.scheduler.client.report [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Overwriting current allocation {'allocations': {'92e8f362-5134-40c6-9a5c-0b8f64197972': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 151}}, 'project_id': '1ef8dc436e4b45d0a8d50468666358e3', 'user_id': '6db3ba1bb9b9464d870969f1f7d95a9d', 'consumer_generation': 1} on consumer 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe {{(pid=62204) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1114.436965] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1114.437238] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.437490] env[62204]: DEBUG nova.network.neutron [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1114.494041] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.494041] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.494041] env[62204]: DEBUG nova.objects.instance [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lazy-loading 'resources' on Instance uuid 3258243e-a9df-4b3e-a6bd-17e3b2168efe {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.058060] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f75a7f-f14a-4ec5-99c4-08e8e152dc7d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.065663] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e2ac60-92ae-492a-9136-943ba6a981ca {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.099168] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c88942d-5ee3-4dab-a289-b16e2696c61c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.106306] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f11bc1-2507-40db-bff9-d46a996f5d68 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.120523] env[62204]: DEBUG nova.compute.provider_tree [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.165072] env[62204]: DEBUG nova.network.neutron [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [{"id": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "address": "fa:16:3e:26:e0:27", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3af8f6-e0", "ovs_interfaceid": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1115.624130] env[62204]: DEBUG nova.scheduler.client.report [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1115.668076] env[62204]: DEBUG oslo_concurrency.lockutils [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1115.668612] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1115.668879] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2135315c-3a95-4e83-b43e-6acc53608d3d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.676116] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1115.676116] env[62204]: value = "task-1200492" [ 1115.676116] env[62204]: _type = "Task" [ 1115.676116] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.684243] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200492, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.129554] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.636s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.146466] env[62204]: INFO nova.scheduler.client.report [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Deleted allocations for instance 3258243e-a9df-4b3e-a6bd-17e3b2168efe [ 1116.185267] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200492, 'name': PowerOffVM_Task, 'duration_secs': 0.191847} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.185558] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1116.186216] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1116.186435] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1116.186597] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.186808] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1116.186954] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 
tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.187120] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1116.187351] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1116.187508] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1116.187678] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1116.187848] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1116.188025] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1116.192928] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8feaaae6-aa78-483f-8321-caea75891016 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.208439] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1116.208439] env[62204]: value = "task-1200493" [ 1116.208439] env[62204]: _type = "Task" [ 1116.208439] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.218366] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200493, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.655113] env[62204]: DEBUG oslo_concurrency.lockutils [None req-a5d5c982-a709-4341-9fcc-4c5c93be0c4d tempest-AttachVolumeShelveTestJSON-2052414767 tempest-AttachVolumeShelveTestJSON-2052414767-project-member] Lock "3258243e-a9df-4b3e-a6bd-17e3b2168efe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.368s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.718607] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200493, 'name': ReconfigVM_Task, 'duration_secs': 0.143093} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.719516] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b9836b-f90c-471f-99e4-9869eab4d4be {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.738557] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1116.738848] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1116.739069] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1116.739320] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1116.739556] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1116.739779] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 
tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1116.740084] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1116.740245] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1116.740500] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1116.740703] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1116.740916] env[62204]: DEBUG nova.virt.hardware [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1116.741733] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35c91657-c3a2-465d-984d-d2181b57bae3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.747538] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1116.747538] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527f85f8-42d8-0356-c5db-3e79a8986f60" [ 1116.747538] env[62204]: _type = "Task" [ 1116.747538] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.755248] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527f85f8-42d8-0356-c5db-3e79a8986f60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.258143] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]527f85f8-42d8-0356-c5db-3e79a8986f60, 'name': SearchDatastore_Task, 'duration_secs': 0.00678} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.263457] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1117.263739] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af043ba0-ec6e-4a91-af33-40872347aeb5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.280831] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1117.280831] env[62204]: value = "task-1200494" [ 1117.280831] env[62204]: _type = "Task" [ 1117.280831] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.288185] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200494, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.346849] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1117.789922] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200494, 'name': ReconfigVM_Task, 'duration_secs': 0.17903} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.791968] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1117.791968] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f0568e-e61b-4562-8e2a-646f0ca70d1e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.813829] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe/16b2b4d0-f6df-4025-b1f5-72e05c1d9abe.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1117.814059] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f6dc61b-c02c-4100-a037-df50fff821d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.830852] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1117.830852] env[62204]: value = "task-1200496" [ 1117.830852] env[62204]: _type = "Task" [ 1117.830852] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.838047] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200496, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.340364] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200496, 'name': ReconfigVM_Task, 'duration_secs': 0.272049} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.340689] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe/16b2b4d0-f6df-4025-b1f5-72e05c1d9abe.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.341479] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb3b58a-42ff-4f96-9595-d29041e43dd7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.358515] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.358666] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Starting heal instance info cache {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1118.360544] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a75862c-e45e-4d38-b296-0ac428edbdef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.378657] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27140a19-1af8-45f1-874f-16e65e634b77 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.396339] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99fb670e-dea7-4850-a623-7e9c420236ea {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.402456] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1118.402693] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c00266a-4b27-494e-a0ca-51e7b1eb94bd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.409745] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1118.409745] env[62204]: value = "task-1200497" [ 1118.409745] env[62204]: _type = "Task" [ 1118.409745] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.416622] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200497, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.924918] env[62204]: DEBUG oslo_vmware.api [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200497, 'name': PowerOnVM_Task, 'duration_secs': 0.358237} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.924918] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1119.368555] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1119.368902] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquired lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.368902] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Forcefully refreshing network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1119.932029] env[62204]: INFO nova.compute.manager [None req-09386815-da78-4baa-a42f-6e65c72450c6 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance to original state: 'active' [ 1120.694954] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [{"id": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "address": "fa:16:3e:26:e0:27", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", 
"segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d3af8f6-e0", "ovs_interfaceid": "9d3af8f6-e075-441b-b191-6617ea2a18a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.989742] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.990151] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.990453] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1120.990729] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1120.991012] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1120.993293] env[62204]: INFO nova.compute.manager [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Terminating instance [ 1120.995181] env[62204]: DEBUG nova.compute.manager [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1120.995452] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1120.996388] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953f3dad-8da7-4ab9-9284-67858683f868 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.004496] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1121.004990] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a90cd623-4149-4e3d-81d6-7c989daa89c7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.010829] env[62204]: DEBUG oslo_vmware.api [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1121.010829] env[62204]: value = "task-1200498" [ 1121.010829] env[62204]: _type = "Task" [ 1121.010829] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.018906] env[62204]: DEBUG oslo_vmware.api [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200498, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.199271] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Releasing lock "refresh_cache-16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1121.199271] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updated the network info_cache for instance {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1121.199271] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.199271] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.199271] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.199271] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.199271] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.520918] env[62204]: DEBUG oslo_vmware.api [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200498, 'name': PowerOffVM_Task, 'duration_secs': 0.201233} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.521176] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1121.521363] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1121.521611] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9e2a3c8-e7ff-4774-8ca9-ce2d51268e54 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.582818] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1121.583072] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1121.583268] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleting the datastore file [datastore1] 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1121.583532] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f962b7e-97fb-4bac-a8bb-4673025161b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.590222] env[62204]: DEBUG oslo_vmware.api [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1121.590222] env[62204]: value = "task-1200500" [ 1121.590222] env[62204]: _type = "Task" [ 1121.590222] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.597740] env[62204]: DEBUG oslo_vmware.api [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200500, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.706195] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Getting list of instances from cluster (obj){ [ 1121.706195] env[62204]: value = "domain-c8" [ 1121.706195] env[62204]: _type = "ClusterComputeResource" [ 1121.706195] env[62204]: } {{(pid=62204) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1121.706878] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a67932-5d23-4de5-83c6-16d6e77c09db {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.719602] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Got total of 2 instances {{(pid=62204) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1121.719733] env[62204]: WARNING nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] While synchronizing instance power states, found 3 instances in the database and 2 instances on the hypervisor. [ 1121.719872] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Triggering sync for uuid 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe {{(pid=62204) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1121.720072] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Triggering sync for uuid 18cad772-c6f8-4797-8c03-86321ee62958 {{(pid=62204) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1121.720401] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Triggering sync for uuid 3b803bcb-3d18-455e-b2fa-40fe074dfdbb {{(pid=62204) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1121.720752] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.721017] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "18cad772-c6f8-4797-8c03-86321ee62958" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.721684] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "18cad772-c6f8-4797-8c03-86321ee62958" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.721684] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1121.721684] env[62204]: DEBUG 
oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1121.721873] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1121.722054] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62204) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1121.722720] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c74bd7-ee9d-4206-8503-adc13c98a031 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.726024] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4a9c92-d2cb-463e-864b-8040e662a492 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.728093] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1122.100484] env[62204]: DEBUG oslo_vmware.api [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200500, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.309203} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.100762] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.100951] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1122.101156] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1122.101335] env[62204]: INFO nova.compute.manager [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Took 1.11 seconds to destroy the instance on the hypervisor. 
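[editor's note] The destroy sequence above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) follows the same oslo.vmware pattern that recurs throughout this log: the driver invokes a vSphere *_Task method, then the session object polls TaskInfo, producing the "Waiting for the task ... to complete", "progress is N%" and "completed successfully" DEBUG lines. The snippet below is an illustrative sketch only, not Nova's own helper code; the endpoint, credentials and poll interval are placeholders, and only the oslo.vmware calls actually named in the log (wait_for_task, the Vim task invocation) are assumed to exist.

    # Sketch: driving a vSphere task through oslo.vmware the way the log records it.
    from oslo_vmware import api as vmware_api

    # Hypothetical session parameters; the real values are in nova.conf, not this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # Emits "Invoking VirtualMachine.PowerOffVM_Task" and returns a Task moref.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task (the "progress is N%" lines) and raises on
        # failure; on success it returns the completed task info.
        return session.wait_for_task(task)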
[ 1122.101621] env[62204]: DEBUG oslo.service.loopingcall [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1122.101841] env[62204]: DEBUG nova.compute.manager [-] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1122.101936] env[62204]: DEBUG nova.network.neutron [-] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1122.230767] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.230998] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.231191] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.231408] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62204) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1122.232270] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a91a38-4c27-4496-8b65-c34a690c2c3c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.236531] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "18cad772-c6f8-4797-8c03-86321ee62958" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.515s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.239271] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.517s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1122.240512] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8772b3-7cb9-4c17-99fd-d28b95c5d338 {{(pid=62204) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.254959] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf863d2-b87d-4558-a189-da4bea32ea79 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.261166] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d87e4a-defb-46b4-8e18-40a6e4987d07 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.289852] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180620MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62204) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1122.289995] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1122.290202] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1122.635214] env[62204]: DEBUG nova.compute.manager [req-45283c9a-ca26-4c0a-bcce-55ce004971eb req-280b940f-984b-4627-8cbe-f9fcb2ad6f68 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Received event network-vif-deleted-9d3af8f6-e075-441b-b191-6617ea2a18a4 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1122.635496] env[62204]: INFO nova.compute.manager [req-45283c9a-ca26-4c0a-bcce-55ce004971eb req-280b940f-984b-4627-8cbe-f9fcb2ad6f68 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Neutron deleted interface 9d3af8f6-e075-441b-b191-6617ea2a18a4; detaching it from the instance and deleting it from the info cache [ 1122.635681] env[62204]: DEBUG nova.network.neutron [req-45283c9a-ca26-4c0a-bcce-55ce004971eb req-280b940f-984b-4627-8cbe-f9fcb2ad6f68 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.024466] env[62204]: DEBUG nova.network.neutron [-] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.138154] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e988d4e-3a08-49aa-befa-9bd52e4ee389 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.149763] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-085bae0c-e466-4604-a0c8-e91879484c1c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.173870] env[62204]: DEBUG 
nova.compute.manager [req-45283c9a-ca26-4c0a-bcce-55ce004971eb req-280b940f-984b-4627-8cbe-f9fcb2ad6f68 service nova] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Detach interface failed, port_id=9d3af8f6-e075-441b-b191-6617ea2a18a4, reason: Instance 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1123.318145] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 18cad772-c6f8-4797-8c03-86321ee62958 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.318374] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 3b803bcb-3d18-455e-b2fa-40fe074dfdbb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.318535] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1123.318729] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1123.318871] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1123.371249] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ead3522-ddaa-4fe4-8a13-91815db6ad23 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.379093] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84223f85-24d5-4887-a1c3-34b80be20db4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.409899] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbd8def-e25a-4921-a260-794f04e363d7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.416649] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cc1bf3-5b2d-40ec-bbc0-7a7cfe864ff8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.430421] env[62204]: DEBUG nova.compute.provider_tree [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed in ProviderTree for 
provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.527134] env[62204]: INFO nova.compute.manager [-] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] Took 1.42 seconds to deallocate network for instance. [ 1123.933644] env[62204]: DEBUG nova.scheduler.client.report [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1124.035571] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.329659] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "18cad772-c6f8-4797-8c03-86321ee62958" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1124.329930] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.330204] env[62204]: INFO nova.compute.manager [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Shelving [ 1124.438952] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1124.439172] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.149s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1124.439401] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.404s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1124.439632] env[62204]: DEBUG nova.objects.instance [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'resources' on Instance uuid 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1124.838019] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1124.838299] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49cbfa49-4122-425a-b035-d566b9635cbe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.845326] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1124.845326] env[62204]: value = "task-1200501" [ 1124.845326] env[62204]: _type = "Task" [ 1124.845326] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.853216] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200501, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.000283] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e12ab40-55df-4f50-a4f0-475499a5b9b8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.007939] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f0f68f-d4ae-4e1b-a81b-b94a0bd0e0b3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.039778] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d290b9-e0bb-4c45-adff-c9a861b9e7bb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.047010] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1ceeb9-e2cc-410c-8355-9761fb06cfae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.061054] env[62204]: DEBUG nova.compute.provider_tree [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.356263] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200501, 'name': PowerOffVM_Task, 'duration_secs': 0.198211} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.356602] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1125.357488] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d54e60f-9007-4803-96fd-0146ba733859 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.376354] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb8c5e9-60f1-4614-b326-70180e24f83d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.563993] env[62204]: DEBUG nova.scheduler.client.report [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1125.885909] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Creating Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1125.886247] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-57ed41c5-6ebf-43d9-af53-f5ba0db43028 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.894114] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1125.894114] env[62204]: value = "task-1200502" [ 1125.894114] env[62204]: _type = "Task" [ 1125.894114] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.901904] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200502, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.069234] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.630s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.087715] env[62204]: INFO nova.scheduler.client.report [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted allocations for instance 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe [ 1126.403543] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200502, 'name': CreateSnapshot_Task, 'duration_secs': 0.445908} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.403772] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Created Snapshot of the VM instance {{(pid=62204) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1126.404590] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5405fe-c5bf-465a-9c73-c60907357e3c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.597361] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f7bfa3c7-709e-42be-9772-109936723296 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.607s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.598745] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.878s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1126.598948] env[62204]: INFO nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 16b2b4d0-f6df-4025-b1f5-72e05c1d9abe] During sync_power_state the instance has a pending task (deleting). Skip. 
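A minimal sketch of the two library patterns behind the entries above and below: the "Acquiring lock ... / acquired / released" messages come from oslo.concurrency's synchronized lock wrapper, and the "Invoking VirtualMachine.PowerOffVM_Task" plus "Task: ... progress is N%" messages come from an oslo.vmware session issuing a vCenter task and polling it. This is illustration only, not code from this deployment: the endpoint, credentials, moref value, and helper function names below are placeholders; the real code paths are nova.compute.resource_tracker and nova.virt.vmwareapi.vm_util as shown in the log trailers.

    # Sketch only: placeholder host/credentials/moref and helper names.
    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    @lockutils.synchronized("compute_resources")
    def update_tracked_usage():
        # The synchronized wrapper is what logs the "Acquiring lock" /
        # "acquired" / "released" DEBUG lines seen for "compute_resources".
        pass

    def power_off_vm(session, moref_value):
        vm_ref = vim_util.get_moref(moref_value, "VirtualMachine")
        # invoke_api() issues the SOAP call logged as
        # "Invoking VirtualMachine.PowerOffVM_Task"; wait_for_task() polls the
        # returned task, producing the "progress is N%" entries.
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        session.wait_for_task(task)

    if __name__ == "__main__":
        # Positional arguments: host, username, password, API retry count,
        # task poll interval in seconds. All values here are placeholders.
        session = vmware_api.VMwareAPISession(
            "vc.example.test", "user", "secret", 10, 0.5)
        update_tracked_usage()
        power_off_vm(session, "vm-123")

For reference, the "Final resource view" figures reported earlier are internally consistent: used_ram of 1088 MB is the 512 MB reserved in the MEMORY_MB inventory plus three instances at 192 MB each, used_disk of 3 GB is three instances at 1 GB each, and used_vcpus of 3 matches the three instances listed with placement allocations.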
[ 1126.599144] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "16b2b4d0-f6df-4025-b1f5-72e05c1d9abe" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1126.921350] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Creating linked-clone VM from snapshot {{(pid=62204) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1126.921672] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7687bcbc-43a3-45ee-9ec1-513bd826b78e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.930870] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1126.930870] env[62204]: value = "task-1200503" [ 1126.930870] env[62204]: _type = "Task" [ 1126.930870] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.938586] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200503, 'name': CloneVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.440245] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200503, 'name': CloneVM_Task} progress is 94%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.940581] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200503, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.114155] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1128.114409] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1128.441644] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200503, 'name': CloneVM_Task} progress is 95%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.617096] env[62204]: DEBUG nova.compute.manager [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Starting instance... {{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1128.943161] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200503, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.148370] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1129.148725] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1129.150339] env[62204]: INFO nova.compute.claims [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1129.424491] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1129.442706] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200503, 'name': CloneVM_Task, 'duration_secs': 2.474722} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.443125] env[62204]: INFO nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Created linked-clone VM from snapshot [ 1129.444168] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582b2462-dba1-46d2-9c7d-4f7f74993f92 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.451221] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Uploading image 087ecd5f-de6c-438e-aa15-8563149d3991 {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1129.474283] env[62204]: DEBUG oslo_vmware.rw_handles [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1129.474283] env[62204]: value = "vm-260151" [ 1129.474283] env[62204]: _type = "VirtualMachine" [ 1129.474283] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1129.474538] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6bb88621-078e-4aba-b904-ada78909ac86 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.480635] env[62204]: DEBUG oslo_vmware.rw_handles [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease: (returnval){ [ 1129.480635] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa6ae5-b3ea-6761-3bdd-c677bb99780a" [ 1129.480635] env[62204]: _type = "HttpNfcLease" [ 1129.480635] env[62204]: } obtained for exporting VM: (result){ [ 1129.480635] env[62204]: value = "vm-260151" [ 1129.480635] env[62204]: _type = "VirtualMachine" [ 1129.480635] env[62204]: }. {{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1129.480892] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the lease: (returnval){ [ 1129.480892] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa6ae5-b3ea-6761-3bdd-c677bb99780a" [ 1129.480892] env[62204]: _type = "HttpNfcLease" [ 1129.480892] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1129.486975] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1129.486975] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa6ae5-b3ea-6761-3bdd-c677bb99780a" [ 1129.486975] env[62204]: _type = "HttpNfcLease" [ 1129.486975] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1129.988629] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1129.988629] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa6ae5-b3ea-6761-3bdd-c677bb99780a" [ 1129.988629] env[62204]: _type = "HttpNfcLease" [ 1129.988629] env[62204]: } is ready. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1129.988984] env[62204]: DEBUG oslo_vmware.rw_handles [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1129.988984] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa6ae5-b3ea-6761-3bdd-c677bb99780a" [ 1129.988984] env[62204]: _type = "HttpNfcLease" [ 1129.988984] env[62204]: }. 
{{(pid=62204) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1129.989663] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa54dfb-045e-4df7-b3d6-eac12d79530b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.997919] env[62204]: DEBUG oslo_vmware.rw_handles [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69aa-4057-4eb2-14a9-852e3516ddc2/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1129.998109] env[62204]: DEBUG oslo_vmware.rw_handles [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69aa-4057-4eb2-14a9-852e3516ddc2/disk-0.vmdk for reading. {{(pid=62204) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1130.091385] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0ca62e65-1c1f-4317-9d52-445830cf582f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.203479] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7df72b9-9d84-463f-b31c-409e5636e713 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.210552] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520223be-0a15-4052-8b61-029ac9f8a271 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.239752] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1d4241-4816-482a-8511-02aafac2fe94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.246333] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8065cce0-452a-4129-bf9a-7073112edeae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.259049] env[62204]: DEBUG nova.compute.provider_tree [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.762353] env[62204]: DEBUG nova.scheduler.client.report [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1131.267766] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.119s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1131.268609] env[62204]: DEBUG nova.compute.manager [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Start building networks asynchronously for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1131.775147] env[62204]: DEBUG nova.compute.utils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1131.776716] env[62204]: DEBUG nova.compute.manager [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1131.777011] env[62204]: DEBUG nova.network.neutron [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1131.817032] env[62204]: DEBUG nova.policy [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6db3ba1bb9b9464d870969f1f7d95a9d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ef8dc436e4b45d0a8d50468666358e3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1132.102457] env[62204]: DEBUG nova.network.neutron [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Successfully created port: cb9ecbdd-94c6-48fe-acc1-c0721410b962 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1132.280934] env[62204]: DEBUG nova.compute.manager [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Start building block device mappings for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1133.292729] env[62204]: DEBUG nova.compute.manager [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1133.319443] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1133.319709] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1133.319945] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.320185] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1133.320340] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.320493] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1133.320732] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1133.320911] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1133.321101] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1133.321278] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1133.321496] env[62204]: DEBUG nova.virt.hardware [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1133.322386] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1a6dfd-c862-44d6-b90c-ad912cc3bdff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.330467] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a5209e-d45f-4f37-9db4-3bf90ab76106 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.515631] env[62204]: DEBUG nova.compute.manager [req-551f463c-181c-49e1-b782-5c2146f632e3 req-4f6b7960-7a9e-4723-b73b-edc7704e0f7c service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Received event network-vif-plugged-cb9ecbdd-94c6-48fe-acc1-c0721410b962 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1133.515793] env[62204]: DEBUG oslo_concurrency.lockutils [req-551f463c-181c-49e1-b782-5c2146f632e3 req-4f6b7960-7a9e-4723-b73b-edc7704e0f7c service nova] Acquiring lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.515970] env[62204]: DEBUG oslo_concurrency.lockutils [req-551f463c-181c-49e1-b782-5c2146f632e3 req-4f6b7960-7a9e-4723-b73b-edc7704e0f7c service nova] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.516265] env[62204]: DEBUG oslo_concurrency.lockutils [req-551f463c-181c-49e1-b782-5c2146f632e3 req-4f6b7960-7a9e-4723-b73b-edc7704e0f7c service nova] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.516579] env[62204]: DEBUG nova.compute.manager 
[req-551f463c-181c-49e1-b782-5c2146f632e3 req-4f6b7960-7a9e-4723-b73b-edc7704e0f7c service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] No waiting events found dispatching network-vif-plugged-cb9ecbdd-94c6-48fe-acc1-c0721410b962 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1133.516780] env[62204]: WARNING nova.compute.manager [req-551f463c-181c-49e1-b782-5c2146f632e3 req-4f6b7960-7a9e-4723-b73b-edc7704e0f7c service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Received unexpected event network-vif-plugged-cb9ecbdd-94c6-48fe-acc1-c0721410b962 for instance with vm_state building and task_state spawning. [ 1134.045317] env[62204]: DEBUG nova.network.neutron [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Successfully updated port: cb9ecbdd-94c6-48fe-acc1-c0721410b962 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1134.096075] env[62204]: DEBUG nova.compute.manager [req-9732cee3-86b4-42fc-93de-0da7785e1e90 req-d177d82a-f5b5-44d4-ae4c-f22aab6055b4 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Received event network-changed-cb9ecbdd-94c6-48fe-acc1-c0721410b962 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1134.096279] env[62204]: DEBUG nova.compute.manager [req-9732cee3-86b4-42fc-93de-0da7785e1e90 req-d177d82a-f5b5-44d4-ae4c-f22aab6055b4 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Refreshing instance network info cache due to event network-changed-cb9ecbdd-94c6-48fe-acc1-c0721410b962. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1134.096479] env[62204]: DEBUG oslo_concurrency.lockutils [req-9732cee3-86b4-42fc-93de-0da7785e1e90 req-d177d82a-f5b5-44d4-ae4c-f22aab6055b4 service nova] Acquiring lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.096662] env[62204]: DEBUG oslo_concurrency.lockutils [req-9732cee3-86b4-42fc-93de-0da7785e1e90 req-d177d82a-f5b5-44d4-ae4c-f22aab6055b4 service nova] Acquired lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.097249] env[62204]: DEBUG nova.network.neutron [req-9732cee3-86b4-42fc-93de-0da7785e1e90 req-d177d82a-f5b5-44d4-ae4c-f22aab6055b4 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Refreshing network info cache for port cb9ecbdd-94c6-48fe-acc1-c0721410b962 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1134.550083] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1134.632154] env[62204]: DEBUG nova.network.neutron [req-9732cee3-86b4-42fc-93de-0da7785e1e90 req-d177d82a-f5b5-44d4-ae4c-f22aab6055b4 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1134.710874] env[62204]: DEBUG nova.network.neutron [req-9732cee3-86b4-42fc-93de-0da7785e1e90 req-d177d82a-f5b5-44d4-ae4c-f22aab6055b4 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.213647] env[62204]: DEBUG oslo_concurrency.lockutils [req-9732cee3-86b4-42fc-93de-0da7785e1e90 req-d177d82a-f5b5-44d4-ae4c-f22aab6055b4 service nova] Releasing lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1135.214078] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.214246] env[62204]: DEBUG nova.network.neutron [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1135.745402] env[62204]: DEBUG nova.network.neutron [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Instance cache missing network info. 
{{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1135.874973] env[62204]: DEBUG nova.network.neutron [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updating instance_info_cache with network_info: [{"id": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "address": "fa:16:3e:ca:ef:72", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb9ecbdd-94", "ovs_interfaceid": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.377531] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1136.378024] env[62204]: DEBUG nova.compute.manager [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Instance network_info: |[{"id": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "address": "fa:16:3e:ca:ef:72", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb9ecbdd-94", "ovs_interfaceid": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1136.378474] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:ef:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb9ecbdd-94c6-48fe-acc1-c0721410b962', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1136.385894] env[62204]: DEBUG oslo.service.loopingcall [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1136.386150] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1136.386394] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5560f76-ab5a-48c5-851c-1d6851e08561 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.405978] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1136.405978] env[62204]: value = "task-1200505" [ 1136.405978] env[62204]: _type = "Task" [ 1136.405978] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.413575] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200505, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.917114] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200505, 'name': CreateVM_Task, 'duration_secs': 0.333572} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.917523] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.918022] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1136.918203] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.918564] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1136.918830] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-470e3eee-ba43-4dd6-9ab8-0a07ec0d25e2 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.923898] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1136.923898] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa19a3-8a52-96f2-b986-22690f17eed1" [ 1136.923898] env[62204]: _type = "Task" [ 1136.923898] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.932291] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa19a3-8a52-96f2-b986-22690f17eed1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.435767] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52fa19a3-8a52-96f2-b986-22690f17eed1, 'name': SearchDatastore_Task, 'duration_secs': 0.012278} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.435767] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1137.435767] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1137.435767] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1137.435767] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.436098] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1137.436227] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7353b74-c7c4-4621-8a06-aa0f18419f30 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.444984] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1137.445212] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1137.446076] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc0ce4e1-b4d3-465b-852a-10f350bcceef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.451265] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1137.451265] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522d5e0c-5a8d-c923-34c5-0c34c6b9899e" [ 1137.451265] env[62204]: _type = "Task" [ 1137.451265] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.459127] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522d5e0c-5a8d-c923-34c5-0c34c6b9899e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.962075] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522d5e0c-5a8d-c923-34c5-0c34c6b9899e, 'name': SearchDatastore_Task, 'duration_secs': 0.008407} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.962625] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2804b8c5-efe3-487d-a2d6-01e468e0d133 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.967771] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1137.967771] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5296476e-c045-ce53-327e-3104de0cb0bf" [ 1137.967771] env[62204]: _type = "Task" [ 1137.967771] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.975855] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5296476e-c045-ce53-327e-3104de0cb0bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.058456] env[62204]: DEBUG oslo_vmware.rw_handles [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69aa-4057-4eb2-14a9-852e3516ddc2/disk-0.vmdk. 
{{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1138.059375] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b9633a-7442-46c1-b8a1-12c4b5bdc773 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.065393] env[62204]: DEBUG oslo_vmware.rw_handles [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69aa-4057-4eb2-14a9-852e3516ddc2/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1138.065566] env[62204]: ERROR oslo_vmware.rw_handles [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69aa-4057-4eb2-14a9-852e3516ddc2/disk-0.vmdk due to incomplete transfer. [ 1138.065791] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ce473846-01f0-413c-96bc-62ea738cd39c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.072141] env[62204]: DEBUG oslo_vmware.rw_handles [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527b69aa-4057-4eb2-14a9-852e3516ddc2/disk-0.vmdk. {{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1138.072338] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Uploaded image 087ecd5f-de6c-438e-aa15-8563149d3991 to the Glance image server {{(pid=62204) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1138.074607] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Destroying the VM {{(pid=62204) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1138.074836] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b8237f2d-8f20-4a21-9373-d65fa3dc8e54 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.080027] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1138.080027] env[62204]: value = "task-1200506" [ 1138.080027] env[62204]: _type = "Task" [ 1138.080027] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.087203] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200506, 'name': Destroy_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.477634] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5296476e-c045-ce53-327e-3104de0cb0bf, 'name': SearchDatastore_Task, 'duration_secs': 0.009496} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.477984] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "[datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1138.478329] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 471b4b93-a4b7-4b1c-8559-24a8db15b1b7/471b4b93-a4b7-4b1c-8559-24a8db15b1b7.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1138.478675] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e1dfe01-c9c8-4d2e-812d-582641e5d252 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.485570] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1138.485570] env[62204]: value = "task-1200507" [ 1138.485570] env[62204]: _type = "Task" [ 1138.485570] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.493009] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200507, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.590423] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200506, 'name': Destroy_Task} progress is 33%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.995157] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200507, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.432297} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.995497] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore2] 471b4b93-a4b7-4b1c-8559-24a8db15b1b7/471b4b93-a4b7-4b1c-8559-24a8db15b1b7.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1138.995671] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1138.995972] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-518cc8dd-eaf7-4872-9592-9d7f7a97ac43 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.002350] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1139.002350] env[62204]: value = "task-1200508" [ 1139.002350] env[62204]: _type = "Task" [ 1139.002350] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.009566] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200508, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.089994] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200506, 'name': Destroy_Task, 'duration_secs': 0.722567} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.090273] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Destroyed the VM [ 1139.090518] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Deleting Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1139.090766] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-27e84319-0f10-4f41-8719-45d28ad53f2e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.096337] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1139.096337] env[62204]: value = "task-1200509" [ 1139.096337] env[62204]: _type = "Task" [ 1139.096337] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.103362] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200509, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.511647] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200508, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06284} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.511917] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1139.512684] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6178072e-a374-4c65-a08a-e8d683d1fe91 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.535337] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 471b4b93-a4b7-4b1c-8559-24a8db15b1b7/471b4b93-a4b7-4b1c-8559-24a8db15b1b7.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.535567] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fcee81e-609c-46c6-aa51-ab7960a58ec7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.554538] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1139.554538] env[62204]: value = "task-1200510" [ 1139.554538] env[62204]: _type = "Task" [ 1139.554538] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.561986] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200510, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.605430] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200509, 'name': RemoveSnapshot_Task, 'duration_secs': 0.389027} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.605671] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Deleted Snapshot of the VM instance {{(pid=62204) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1139.605939] env[62204]: DEBUG nova.compute.manager [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1139.606674] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3549e66-1b0b-4f58-be7e-44d428c106d1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.065078] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200510, 'name': ReconfigVM_Task, 'duration_secs': 0.294225} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.065078] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 471b4b93-a4b7-4b1c-8559-24a8db15b1b7/471b4b93-a4b7-4b1c-8559-24a8db15b1b7.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.065835] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3209f4d5-12bf-4393-8103-665912751416 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.072646] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1140.072646] env[62204]: value = "task-1200511" [ 1140.072646] env[62204]: _type = "Task" [ 1140.072646] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.079753] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200511, 'name': Rename_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.122225] env[62204]: INFO nova.compute.manager [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Shelve offloading [ 1140.123280] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1140.123561] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f5d8783-8a68-4953-8fee-adc6ce989fd7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.130221] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1140.130221] env[62204]: value = "task-1200512" [ 1140.130221] env[62204]: _type = "Task" [ 1140.130221] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.138644] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200512, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.582334] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200511, 'name': Rename_Task, 'duration_secs': 0.146527} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.583058] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1140.583179] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7288458e-35b1-4711-b893-28ae8da16f79 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.589160] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1140.589160] env[62204]: value = "task-1200513" [ 1140.589160] env[62204]: _type = "Task" [ 1140.589160] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.596162] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200513, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.639928] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] VM already powered off {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1140.640183] env[62204]: DEBUG nova.compute.manager [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1140.640946] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82671437-15bf-4bdd-a9f1-e7febde98a60 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.646171] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1140.646350] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.646571] env[62204]: DEBUG nova.network.neutron [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1141.098822] env[62204]: DEBUG oslo_vmware.api [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200513, 'name': PowerOnVM_Task, 'duration_secs': 0.485119} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.099246] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1141.099320] env[62204]: INFO nova.compute.manager [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Took 7.81 seconds to spawn the instance on the hypervisor. [ 1141.099503] env[62204]: DEBUG nova.compute.manager [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1141.100293] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c453a9-1d05-4438-ba01-5cc021bc3ed3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.343109] env[62204]: DEBUG nova.network.neutron [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating instance_info_cache with network_info: [{"id": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "address": "fa:16:3e:8d:2c:29", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68464bf7-61", "ovs_interfaceid": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.616967] env[62204]: INFO nova.compute.manager [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Took 12.50 seconds to build instance. 
[ 1141.846150] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1142.087627] env[62204]: DEBUG nova.compute.manager [req-97482596-9db0-41dd-8c20-65a001151126 req-6c0c5800-b896-473b-b2a4-7d94c1a29a91 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received event network-vif-unplugged-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1142.087862] env[62204]: DEBUG oslo_concurrency.lockutils [req-97482596-9db0-41dd-8c20-65a001151126 req-6c0c5800-b896-473b-b2a4-7d94c1a29a91 service nova] Acquiring lock "18cad772-c6f8-4797-8c03-86321ee62958-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.088089] env[62204]: DEBUG oslo_concurrency.lockutils [req-97482596-9db0-41dd-8c20-65a001151126 req-6c0c5800-b896-473b-b2a4-7d94c1a29a91 service nova] Lock "18cad772-c6f8-4797-8c03-86321ee62958-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.088263] env[62204]: DEBUG oslo_concurrency.lockutils [req-97482596-9db0-41dd-8c20-65a001151126 req-6c0c5800-b896-473b-b2a4-7d94c1a29a91 service nova] Lock "18cad772-c6f8-4797-8c03-86321ee62958-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.088439] env[62204]: DEBUG nova.compute.manager [req-97482596-9db0-41dd-8c20-65a001151126 req-6c0c5800-b896-473b-b2a4-7d94c1a29a91 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] No waiting events found dispatching network-vif-unplugged-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1142.088628] env[62204]: WARNING nova.compute.manager [req-97482596-9db0-41dd-8c20-65a001151126 req-6c0c5800-b896-473b-b2a4-7d94c1a29a91 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received unexpected event network-vif-unplugged-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d for instance with vm_state shelved and task_state shelving_offloading. 
[ 1142.118756] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c7de29d7-eb7b-473d-8b76-e4e406a2aae5 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.004s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.174382] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1142.175332] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece5f745-955f-4afd-a46d-08352daec95a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.183308] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1142.183567] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98fa66c8-6364-48eb-ab02-e302201f648e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.254783] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1142.255084] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1142.255281] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleting the datastore file [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1142.255621] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c27e8944-1561-4497-8739-128cac94970b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.261948] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1142.261948] env[62204]: value = "task-1200515" [ 1142.261948] env[62204]: _type = "Task" [ 1142.261948] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.270276] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200515, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.638271] env[62204]: DEBUG nova.compute.manager [req-8d491b49-52a0-4d29-bdd3-2f1cc7261a2c req-2e5c261e-2941-4f05-b890-2889c6144a11 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Received event network-changed-cb9ecbdd-94c6-48fe-acc1-c0721410b962 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1142.638471] env[62204]: DEBUG nova.compute.manager [req-8d491b49-52a0-4d29-bdd3-2f1cc7261a2c req-2e5c261e-2941-4f05-b890-2889c6144a11 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Refreshing instance network info cache due to event network-changed-cb9ecbdd-94c6-48fe-acc1-c0721410b962. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1142.638694] env[62204]: DEBUG oslo_concurrency.lockutils [req-8d491b49-52a0-4d29-bdd3-2f1cc7261a2c req-2e5c261e-2941-4f05-b890-2889c6144a11 service nova] Acquiring lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1142.638838] env[62204]: DEBUG oslo_concurrency.lockutils [req-8d491b49-52a0-4d29-bdd3-2f1cc7261a2c req-2e5c261e-2941-4f05-b890-2889c6144a11 service nova] Acquired lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.638999] env[62204]: DEBUG nova.network.neutron [req-8d491b49-52a0-4d29-bdd3-2f1cc7261a2c req-2e5c261e-2941-4f05-b890-2889c6144a11 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Refreshing network info cache for port cb9ecbdd-94c6-48fe-acc1-c0721410b962 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1142.772313] env[62204]: DEBUG oslo_vmware.api [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200515, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136201} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.772578] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1142.772766] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1142.772944] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1142.794370] env[62204]: INFO nova.scheduler.client.report [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted allocations for instance 18cad772-c6f8-4797-8c03-86321ee62958 [ 1143.300162] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.300458] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.300663] env[62204]: DEBUG nova.objects.instance [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'resources' on Instance uuid 18cad772-c6f8-4797-8c03-86321ee62958 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.405684] env[62204]: DEBUG nova.network.neutron [req-8d491b49-52a0-4d29-bdd3-2f1cc7261a2c req-2e5c261e-2941-4f05-b890-2889c6144a11 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updated VIF entry in instance network info cache for port cb9ecbdd-94c6-48fe-acc1-c0721410b962. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1143.406116] env[62204]: DEBUG nova.network.neutron [req-8d491b49-52a0-4d29-bdd3-2f1cc7261a2c req-2e5c261e-2941-4f05-b890-2889c6144a11 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updating instance_info_cache with network_info: [{"id": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "address": "fa:16:3e:ca:ef:72", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb9ecbdd-94", "ovs_interfaceid": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.805235] env[62204]: DEBUG nova.objects.instance [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'numa_topology' on Instance uuid 18cad772-c6f8-4797-8c03-86321ee62958 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.909514] env[62204]: DEBUG oslo_concurrency.lockutils [req-8d491b49-52a0-4d29-bdd3-2f1cc7261a2c req-2e5c261e-2941-4f05-b890-2889c6144a11 service nova] Releasing lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1144.117779] env[62204]: DEBUG nova.compute.manager [req-f654606a-817f-4075-901b-8528139506a6 req-d544581c-aacf-4e64-b010-b3f3f2a2d80a service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received event network-changed-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1144.117990] env[62204]: DEBUG nova.compute.manager [req-f654606a-817f-4075-901b-8528139506a6 req-d544581c-aacf-4e64-b010-b3f3f2a2d80a service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Refreshing instance network info cache due to event network-changed-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d. 
{{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1144.118233] env[62204]: DEBUG oslo_concurrency.lockutils [req-f654606a-817f-4075-901b-8528139506a6 req-d544581c-aacf-4e64-b010-b3f3f2a2d80a service nova] Acquiring lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1144.118380] env[62204]: DEBUG oslo_concurrency.lockutils [req-f654606a-817f-4075-901b-8528139506a6 req-d544581c-aacf-4e64-b010-b3f3f2a2d80a service nova] Acquired lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.118569] env[62204]: DEBUG nova.network.neutron [req-f654606a-817f-4075-901b-8528139506a6 req-d544581c-aacf-4e64-b010-b3f3f2a2d80a service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Refreshing network info cache for port 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1144.307924] env[62204]: DEBUG nova.objects.base [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Object Instance<18cad772-c6f8-4797-8c03-86321ee62958> lazy-loaded attributes: resources,numa_topology {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1144.352710] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f485fe40-b5b1-4488-ad78-98126db2e8d3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.360108] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d619d06b-ef02-4820-b7ed-c7424143aeec {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.389661] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846d1393-fc06-4f94-8765-95b259da5d40 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.396584] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b0bdd0-eabe-42a9-9819-7628cdd72cb8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.409464] env[62204]: DEBUG nova.compute.provider_tree [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.722641] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "18cad772-c6f8-4797-8c03-86321ee62958" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.844369] env[62204]: DEBUG nova.network.neutron [req-f654606a-817f-4075-901b-8528139506a6 req-d544581c-aacf-4e64-b010-b3f3f2a2d80a service nova] 
[instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updated VIF entry in instance network info cache for port 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d. {{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1144.844744] env[62204]: DEBUG nova.network.neutron [req-f654606a-817f-4075-901b-8528139506a6 req-d544581c-aacf-4e64-b010-b3f3f2a2d80a service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating instance_info_cache with network_info: [{"id": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "address": "fa:16:3e:8d:2c:29", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap68464bf7-61", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.912913] env[62204]: DEBUG nova.scheduler.client.report [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1145.347230] env[62204]: DEBUG oslo_concurrency.lockutils [req-f654606a-817f-4075-901b-8528139506a6 req-d544581c-aacf-4e64-b010-b3f3f2a2d80a service nova] Releasing lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1145.417559] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.117s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.925561] env[62204]: DEBUG oslo_concurrency.lockutils [None req-f45fc8c5-841b-4407-b03c-b83580a17a61 tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.596s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.926553] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.204s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.926643] env[62204]: INFO nova.compute.manager [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Unshelving [ 1146.949842] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.950142] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.950360] env[62204]: DEBUG nova.objects.instance [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'pci_requests' on Instance uuid 18cad772-c6f8-4797-8c03-86321ee62958 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.453926] env[62204]: DEBUG nova.objects.instance [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'numa_topology' on Instance uuid 18cad772-c6f8-4797-8c03-86321ee62958 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.956988] env[62204]: INFO nova.compute.claims [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1149.015957] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49409f3d-2f72-464e-ad6f-7be7dfc5d584 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.023303] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71dd80e-4a7f-4a8c-82e8-58de55720506 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.052447] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba5b5f8-7744-4d59-87fd-252513236b90 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.059149] env[62204]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02b54f3-fbda-4434-a9fb-71d55bf0fd2f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.073155] env[62204]: DEBUG nova.compute.provider_tree [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.214926] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1149.215223] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.577100] env[62204]: DEBUG nova.scheduler.client.report [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1149.718736] env[62204]: DEBUG nova.compute.utils [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1150.081090] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.131s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.110581] env[62204]: INFO nova.network.neutron [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating port 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1150.222057] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 
tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.283186] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.283538] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.283722] env[62204]: INFO nova.compute.manager [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Attaching volume 6bcbecb7-7cf6-4ed2-bdb5-5229a814464a to /dev/sdb [ 1151.313620] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f8e7bd-6084-47f6-b8f6-d2da841a41c7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.320724] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920b05b1-a8d0-4dc5-b787-17b83286f643 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.334021] env[62204]: DEBUG nova.virt.block_device [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Updating existing volume attachment record: 7537fa69-0052-4105-8645-acd68f972dc9 {{(pid=62204) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1151.504091] env[62204]: DEBUG nova.compute.manager [req-9564978e-afe9-43a2-86db-ea6959b366b3 req-576ddc7d-9d9c-465e-bb45-881f6d9e0787 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received event network-vif-plugged-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1151.504330] env[62204]: DEBUG oslo_concurrency.lockutils [req-9564978e-afe9-43a2-86db-ea6959b366b3 req-576ddc7d-9d9c-465e-bb45-881f6d9e0787 service nova] Acquiring lock "18cad772-c6f8-4797-8c03-86321ee62958-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.504542] env[62204]: DEBUG oslo_concurrency.lockutils [req-9564978e-afe9-43a2-86db-ea6959b366b3 req-576ddc7d-9d9c-465e-bb45-881f6d9e0787 service nova] Lock "18cad772-c6f8-4797-8c03-86321ee62958-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.504714] env[62204]: DEBUG oslo_concurrency.lockutils [req-9564978e-afe9-43a2-86db-ea6959b366b3 req-576ddc7d-9d9c-465e-bb45-881f6d9e0787 service nova] Lock "18cad772-c6f8-4797-8c03-86321ee62958-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1151.504885] env[62204]: DEBUG nova.compute.manager [req-9564978e-afe9-43a2-86db-ea6959b366b3 req-576ddc7d-9d9c-465e-bb45-881f6d9e0787 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] No waiting events found dispatching network-vif-plugged-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1151.505066] env[62204]: WARNING nova.compute.manager [req-9564978e-afe9-43a2-86db-ea6959b366b3 req-576ddc7d-9d9c-465e-bb45-881f6d9e0787 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received unexpected event network-vif-plugged-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d for instance with vm_state shelved_offloaded and task_state spawning. [ 1151.586720] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1151.586895] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.587111] env[62204]: DEBUG nova.network.neutron [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1152.291428] env[62204]: DEBUG nova.network.neutron [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating instance_info_cache with network_info: [{"id": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "address": "fa:16:3e:8d:2c:29", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68464bf7-61", "ovs_interfaceid": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.794051] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1152.821985] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='11fcf88f4cbe25785f7c8e69dae239c4',container_format='bare',created_at=2024-10-08T23:46:34Z,direct_url=,disk_format='vmdk',id=087ecd5f-de6c-438e-aa15-8563149d3991,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1099989567-shelved',owner='d93f6aa3eaad4c5b91b657e75854f45f',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2024-10-08T23:46:48Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1152.822339] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1152.822566] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1152.822827] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1152.823051] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1152.823266] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1152.823553] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1152.823782] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1152.824026] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1152.824267] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1152.824515] env[62204]: DEBUG nova.virt.hardware [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1152.826104] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76d9c2b-eacd-4417-9813-b572cfa09946 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.836606] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed501f2-2ca0-4c29-9ebc-bfb7f7901b36 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.855474] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:2c:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd4cb37d4-2060-48b6-9e60-156a71fc7ee3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68464bf7-61a8-4b7f-bbd7-a546e9e3f17d', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1152.867718] env[62204]: DEBUG oslo.service.loopingcall [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1152.867718] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1152.868012] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c02937b5-296a-4cf9-9625-5db597054bd9 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.889162] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1152.889162] env[62204]: value = "task-1200517" [ 1152.889162] env[62204]: _type = "Task" [ 1152.889162] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.896255] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200517, 'name': CreateVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.399086] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200517, 'name': CreateVM_Task, 'duration_secs': 0.298573} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.399494] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1153.399923] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.400133] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.400550] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1153.400814] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7a9012e-a880-4956-953d-6a2bebc789cc {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.405112] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1153.405112] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52290727-26c5-61e1-d0a8-7921fd262b5b" [ 1153.405112] env[62204]: _type = "Task" [ 1153.405112] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.412308] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52290727-26c5-61e1-d0a8-7921fd262b5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.529435] env[62204]: DEBUG nova.compute.manager [req-51669adb-e59e-447e-8c7d-42c53b7bbb24 req-8d41f76e-2e26-4370-be42-878550357000 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received event network-changed-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1153.529651] env[62204]: DEBUG nova.compute.manager [req-51669adb-e59e-447e-8c7d-42c53b7bbb24 req-8d41f76e-2e26-4370-be42-878550357000 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Refreshing instance network info cache due to event network-changed-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1153.529891] env[62204]: DEBUG oslo_concurrency.lockutils [req-51669adb-e59e-447e-8c7d-42c53b7bbb24 req-8d41f76e-2e26-4370-be42-878550357000 service nova] Acquiring lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.530077] env[62204]: DEBUG oslo_concurrency.lockutils [req-51669adb-e59e-447e-8c7d-42c53b7bbb24 req-8d41f76e-2e26-4370-be42-878550357000 service nova] Acquired lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.530248] env[62204]: DEBUG nova.network.neutron [req-51669adb-e59e-447e-8c7d-42c53b7bbb24 req-8d41f76e-2e26-4370-be42-878550357000 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Refreshing network info cache for port 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1153.914931] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1153.915304] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Processing image 087ecd5f-de6c-438e-aa15-8563149d3991 {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1153.915726] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991/087ecd5f-de6c-438e-aa15-8563149d3991.vmdk" {{(pid=62204) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1153.915974] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquired lock "[datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991/087ecd5f-de6c-438e-aa15-8563149d3991.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.916254] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1153.916550] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20860cf9-111b-4c59-967e-cd387ee4f8fd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.924521] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1153.924740] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1153.925524] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47acd025-f1b6-482e-9207-e3b1d52068ac {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.930774] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1153.930774] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202749c-d708-dde8-d7af-cb57d334b2f4" [ 1153.930774] env[62204]: _type = "Task" [ 1153.930774] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.939950] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]5202749c-d708-dde8-d7af-cb57d334b2f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.226341] env[62204]: DEBUG nova.network.neutron [req-51669adb-e59e-447e-8c7d-42c53b7bbb24 req-8d41f76e-2e26-4370-be42-878550357000 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updated VIF entry in instance network info cache for port 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1154.226718] env[62204]: DEBUG nova.network.neutron [req-51669adb-e59e-447e-8c7d-42c53b7bbb24 req-8d41f76e-2e26-4370-be42-878550357000 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating instance_info_cache with network_info: [{"id": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "address": "fa:16:3e:8d:2c:29", "network": {"id": "6824f581-3681-423a-b421-0dd4d4bab425", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1111741391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d93f6aa3eaad4c5b91b657e75854f45f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d4cb37d4-2060-48b6-9e60-156a71fc7ee3", "external-id": "nsx-vlan-transportzone-819", "segmentation_id": 819, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68464bf7-61", "ovs_interfaceid": "68464bf7-61a8-4b7f-bbd7-a546e9e3f17d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.440722] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Preparing fetch location {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1154.441152] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Fetch image to [datastore1] OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad/OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad.vmdk {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1154.441216] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Downloading stream optimized image 087ecd5f-de6c-438e-aa15-8563149d3991 to [datastore1] OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad/OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad.vmdk on the data store datastore1 as vApp {{(pid=62204) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1154.441406] env[62204]: DEBUG nova.virt.vmwareapi.images [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Downloading image file data 087ecd5f-de6c-438e-aa15-8563149d3991 to the ESX as VM named 'OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad' {{(pid=62204) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1154.507285] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1154.507285] env[62204]: value = "resgroup-9" [ 1154.507285] env[62204]: _type = "ResourcePool" [ 1154.507285] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1154.507604] env[62204]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-61bc2490-a738-4f52-92d5-c3104993ae94 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.529185] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease: (returnval){ [ 1154.529185] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528dac37-3fcc-f4ae-8f8f-3e44aea019a4" [ 1154.529185] env[62204]: _type = "HttpNfcLease" [ 1154.529185] env[62204]: } obtained for vApp import into resource pool (val){ [ 1154.529185] env[62204]: value = "resgroup-9" [ 1154.529185] env[62204]: _type = "ResourcePool" [ 1154.529185] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1154.529469] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the lease: (returnval){ [ 1154.529469] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528dac37-3fcc-f4ae-8f8f-3e44aea019a4" [ 1154.529469] env[62204]: _type = "HttpNfcLease" [ 1154.529469] env[62204]: } to be ready. {{(pid=62204) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1154.535135] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1154.535135] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528dac37-3fcc-f4ae-8f8f-3e44aea019a4" [ 1154.535135] env[62204]: _type = "HttpNfcLease" [ 1154.535135] env[62204]: } is initializing. {{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1154.729705] env[62204]: DEBUG oslo_concurrency.lockutils [req-51669adb-e59e-447e-8c7d-42c53b7bbb24 req-8d41f76e-2e26-4370-be42-878550357000 service nova] Releasing lock "refresh_cache-18cad772-c6f8-4797-8c03-86321ee62958" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1155.037264] env[62204]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1155.037264] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528dac37-3fcc-f4ae-8f8f-3e44aea019a4" [ 1155.037264] env[62204]: _type = "HttpNfcLease" [ 1155.037264] env[62204]: } is ready. 
{{(pid=62204) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1155.037605] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1155.037605] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]528dac37-3fcc-f4ae-8f8f-3e44aea019a4" [ 1155.037605] env[62204]: _type = "HttpNfcLease" [ 1155.037605] env[62204]: }. {{(pid=62204) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1155.038264] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8858400-96f0-4eeb-a937-7beec7c0c47a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.044889] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52691855-7259-5ccb-5dd8-10e14bc48b0e/disk-0.vmdk from lease info. {{(pid=62204) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1155.045088] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52691855-7259-5ccb-5dd8-10e14bc48b0e/disk-0.vmdk. {{(pid=62204) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1155.108812] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9d09d04d-e876-42b2-a1ff-baee66b4807c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.879195] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Volume attach. 
Driver type: vmdk {{(pid=62204) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1155.879467] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260153', 'volume_id': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'name': 'volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b803bcb-3d18-455e-b2fa-40fe074dfdbb', 'attached_at': '', 'detached_at': '', 'volume_id': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'serial': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1155.880386] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c3620d-0323-4f98-95c8-5b89a1c740ce {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.901251] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f75143-5ea4-4982-ae7a-8cdf807cd96c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.929293] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a/volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1155.931828] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4790bc3d-1f47-4a4a-b58b-25495fcdd2df {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.950011] env[62204]: DEBUG oslo_vmware.api [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1155.950011] env[62204]: value = "task-1200520" [ 1155.950011] env[62204]: _type = "Task" [ 1155.950011] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.958185] env[62204]: DEBUG oslo_vmware.api [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200520, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.190667] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Completed reading data from the image iterator. 
{{(pid=62204) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1156.190935] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52691855-7259-5ccb-5dd8-10e14bc48b0e/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1156.191859] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782e35a7-ceb0-4869-bbfa-8e4ec46ae88c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.198466] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52691855-7259-5ccb-5dd8-10e14bc48b0e/disk-0.vmdk is in state: ready. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1156.198692] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52691855-7259-5ccb-5dd8-10e14bc48b0e/disk-0.vmdk. {{(pid=62204) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1156.198944] env[62204]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6c5aabf5-b874-4a3a-bc0c-78df66cbafd5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.398575] env[62204]: DEBUG oslo_vmware.rw_handles [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52691855-7259-5ccb-5dd8-10e14bc48b0e/disk-0.vmdk. 
{{(pid=62204) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1156.398839] env[62204]: INFO nova.virt.vmwareapi.images [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Downloaded image file data 087ecd5f-de6c-438e-aa15-8563149d3991 [ 1156.399678] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2824b17-0750-494e-a489-75ad609ca657 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.415513] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1409e55-eda0-4267-ae6b-838fe5ea72e5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.443896] env[62204]: INFO nova.virt.vmwareapi.images [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] The imported VM was unregistered [ 1156.446311] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Caching image {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1156.446543] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Creating directory with path [datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991 {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1156.446804] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aac1017c-b949-4194-a5e2-bd00043f4106 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.456272] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Created directory with path [datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991 {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1156.456456] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad/OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad.vmdk to [datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991/087ecd5f-de6c-438e-aa15-8563149d3991.vmdk. 
{{(pid=62204) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1156.459313] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-27deb509-2716-4fb6-80eb-c54ef4ee34e5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.461178] env[62204]: DEBUG oslo_vmware.api [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200520, 'name': ReconfigVM_Task, 'duration_secs': 0.394329} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.461432] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a/volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a.vmdk or device None with type thin {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1156.466801] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec20636d-6058-45bb-8fee-fe3c30b9363a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.477556] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1156.477556] env[62204]: value = "task-1200522" [ 1156.477556] env[62204]: _type = "Task" [ 1156.477556] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.481579] env[62204]: DEBUG oslo_vmware.api [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1156.481579] env[62204]: value = "task-1200523" [ 1156.481579] env[62204]: _type = "Task" [ 1156.481579] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.487928] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200522, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.492715] env[62204]: DEBUG oslo_vmware.api [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200523, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.990346] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200522, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.995334] env[62204]: DEBUG oslo_vmware.api [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200523, 'name': ReconfigVM_Task, 'duration_secs': 0.168815} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.995694] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260153', 'volume_id': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'name': 'volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b803bcb-3d18-455e-b2fa-40fe074dfdbb', 'attached_at': '', 'detached_at': '', 'volume_id': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'serial': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a'} {{(pid=62204) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1157.489115] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200522, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.989462] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200522, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.035321] env[62204]: DEBUG nova.objects.instance [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'flavor' on Instance uuid 3b803bcb-3d18-455e-b2fa-40fe074dfdbb {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.491198] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200522, 'name': MoveVirtualDisk_Task} progress is 94%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.540267] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d982f260-750a-4b69-8bb9-05fbfe158ca9 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.257s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.729791] env[62204]: DEBUG oslo_concurrency.lockutils [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1158.730087] env[62204]: DEBUG oslo_concurrency.lockutils [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1158.991334] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200522, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.181877} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.991599] env[62204]: INFO nova.virt.vmwareapi.ds_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad/OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad.vmdk to [datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991/087ecd5f-de6c-438e-aa15-8563149d3991.vmdk. 
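The MoveVirtualDisk_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries around this point all follow the same invoke-then-poll pattern: the vCenter call returns a Task managed-object reference immediately, and the oslo.vmware session then polls Task.info until it reports success or an error, which is what produces the "progress is N%." and "completed successfully" lines. Below is a minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials and datastore paths are placeholders for illustration, not values taken from this log, and the sketch only mirrors the general pattern the vmwareapi driver uses via oslo.vmware rather than reproducing nova's exact code.

    # Sketch of the invoke-then-wait pattern seen in the task entries above.
    # Host, credentials and datastore paths are placeholders, not values
    # from this log.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org',    # vCenter host (placeholder)
        'administrator@vsphere',  # username (placeholder)
        'secret',                 # password (placeholder)
        10,                       # api_retry_count
        0.5)                      # task_poll_interval: seconds between Task.info polls

    # CopyVirtualDisk_Task returns a Task moref right away; the copy itself
    # runs asynchronously inside vCenter.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
        destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')

    # wait_for_task() polls the task (the "progress is N%." log lines) and
    # returns the final task info on success or raises on error.
    task_info = session.wait_for_task(task)
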
[ 1158.991805] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Cleaning up location [datastore1] OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1158.992014] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_46548834-74ed-4f96-8dec-ca76ab4ff9ad {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1158.992281] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9afae5e0-2cbe-4422-bf8a-04df886ea7a0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.999519] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1158.999519] env[62204]: value = "task-1200524" [ 1158.999519] env[62204]: _type = "Task" [ 1158.999519] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.006645] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200524, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.233510] env[62204]: INFO nova.compute.manager [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Detaching volume 6bcbecb7-7cf6-4ed2-bdb5-5229a814464a [ 1159.262993] env[62204]: INFO nova.virt.block_device [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Attempting to driver detach volume 6bcbecb7-7cf6-4ed2-bdb5-5229a814464a from mountpoint /dev/sdb [ 1159.263292] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Volume detach. 
Driver type: vmdk {{(pid=62204) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1159.263492] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260153', 'volume_id': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'name': 'volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b803bcb-3d18-455e-b2fa-40fe074dfdbb', 'attached_at': '', 'detached_at': '', 'volume_id': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'serial': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1159.264370] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7257f8-8fe1-4fd4-9916-7aa5120329b6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.287032] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-941521dc-5c1a-4a6c-a273-0358837e5b91 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.293512] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4873f94c-af4c-432d-a808-079bcfa38edf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.312738] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2962e9e-9c93-42c1-8d55-14236a98c3c8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.326374] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] The volume has not been displaced from its original location: [datastore1] volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a/volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a.vmdk. No consolidation needed. 
{{(pid=62204) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1159.331442] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1159.331696] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d05ac243-4186-4529-a178-2facf55409a5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.348954] env[62204]: DEBUG oslo_vmware.api [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1159.348954] env[62204]: value = "task-1200525" [ 1159.348954] env[62204]: _type = "Task" [ 1159.348954] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.355574] env[62204]: DEBUG oslo_vmware.api [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200525, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.508704] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033748} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.508931] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1159.509086] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Releasing lock "[datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991/087ecd5f-de6c-438e-aa15-8563149d3991.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.509353] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991/087ecd5f-de6c-438e-aa15-8563149d3991.vmdk to [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958/18cad772-c6f8-4797-8c03-86321ee62958.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1159.509610] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-999fa2ea-c91a-4ea6-98cb-8ae4188edfc7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.515916] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1159.515916] env[62204]: value = "task-1200526" [ 1159.515916] env[62204]: _type = "Task" [ 1159.515916] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.523097] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200526, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.859261] env[62204]: DEBUG oslo_vmware.api [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200525, 'name': ReconfigVM_Task, 'duration_secs': 0.213455} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.859570] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=62204) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1159.864182] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edd554c6-8f2b-44f1-baa6-65a6a44a6ccd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.881123] env[62204]: DEBUG oslo_vmware.api [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1159.881123] env[62204]: value = "task-1200527" [ 1159.881123] env[62204]: _type = "Task" [ 1159.881123] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.889960] env[62204]: DEBUG oslo_vmware.api [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200527, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.025770] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200526, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.391452] env[62204]: DEBUG oslo_vmware.api [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200527, 'name': ReconfigVM_Task, 'duration_secs': 0.16219} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.391849] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-260153', 'volume_id': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'name': 'volume-6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '3b803bcb-3d18-455e-b2fa-40fe074dfdbb', 'attached_at': '', 'detached_at': '', 'volume_id': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a', 'serial': '6bcbecb7-7cf6-4ed2-bdb5-5229a814464a'} {{(pid=62204) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1160.528130] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200526, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.934965] env[62204]: DEBUG nova.objects.instance [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'flavor' on Instance uuid 3b803bcb-3d18-455e-b2fa-40fe074dfdbb {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.026900] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200526, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.529126] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200526, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.943812] env[62204]: DEBUG oslo_concurrency.lockutils [None req-97721bb2-4892-4b54-92de-b5e906110ee2 tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.214s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.028646] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200526, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.336508} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.028984] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/087ecd5f-de6c-438e-aa15-8563149d3991/087ecd5f-de6c-438e-aa15-8563149d3991.vmdk to [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958/18cad772-c6f8-4797-8c03-86321ee62958.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1162.029749] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c4aa6d-e79b-4d4d-a9d4-e724f7a8a772 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.050430] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958/18cad772-c6f8-4797-8c03-86321ee62958.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1162.050667] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52ae0fe2-9ce1-4064-8f38-c53065a151b7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.069041] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1162.069041] env[62204]: value = "task-1200528" [ 1162.069041] env[62204]: _type = "Task" [ 1162.069041] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.078849] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200528, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.578824] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200528, 'name': ReconfigVM_Task, 'duration_secs': 0.276868} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.579199] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958/18cad772-c6f8-4797-8c03-86321ee62958.vmdk or device None with type streamOptimized {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1162.579752] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e48c7024-74e4-4e0b-8108-2931bfa51875 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.585889] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1162.585889] env[62204]: value = "task-1200529" [ 1162.585889] env[62204]: _type = "Task" [ 1162.585889] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.593779] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200529, 'name': Rename_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.971292] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.971536] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.971757] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1162.971951] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1162.972140] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1162.974375] env[62204]: INFO nova.compute.manager [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Terminating instance [ 1162.976161] env[62204]: DEBUG nova.compute.manager [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1162.976360] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1162.977186] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411e17f7-036d-4f85-807c-78d36e239111 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.984705] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1162.984923] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cbe04526-481e-428c-9c8f-66dd7a340e86 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.990583] env[62204]: DEBUG oslo_vmware.api [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1162.990583] env[62204]: value = "task-1200530" [ 1162.990583] env[62204]: _type = "Task" [ 1162.990583] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.997903] env[62204]: DEBUG oslo_vmware.api [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200530, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.095183] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200529, 'name': Rename_Task, 'duration_secs': 0.139478} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.095519] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1163.095784] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b9df96e-ff11-4e2f-9459-8c09a54e00a0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.101964] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1163.101964] env[62204]: value = "task-1200531" [ 1163.101964] env[62204]: _type = "Task" [ 1163.101964] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.109955] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.501306] env[62204]: DEBUG oslo_vmware.api [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200530, 'name': PowerOffVM_Task, 'duration_secs': 0.158058} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.501578] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1163.501757] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1163.502034] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ce7f5ad-f4f2-415f-905e-539c7d015b58 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.569867] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1163.570148] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1163.570348] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleting the datastore file [datastore1] 3b803bcb-3d18-455e-b2fa-40fe074dfdbb {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1163.570707] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30e05fdc-b6c9-4069-b644-431ff88d2588 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.576718] env[62204]: DEBUG oslo_vmware.api [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for the task: (returnval){ [ 1163.576718] env[62204]: value = "task-1200533" [ 1163.576718] env[62204]: _type = "Task" [ 1163.576718] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.583972] env[62204]: DEBUG oslo_vmware.api [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200533, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.610513] env[62204]: DEBUG oslo_vmware.api [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200531, 'name': PowerOnVM_Task, 'duration_secs': 0.432542} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.610770] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1163.709666] env[62204]: DEBUG nova.compute.manager [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1163.710636] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780d4c81-cc4b-4e65-970e-090480da1ecb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.085994] env[62204]: DEBUG oslo_vmware.api [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Task: {'id': task-1200533, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133964} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.086236] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1164.086425] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1164.086609] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1164.086789] env[62204]: INFO nova.compute.manager [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Took 1.11 seconds to destroy the instance on the hypervisor. 
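Annotation: the ReconfigVM_Task, CopyVirtualDisk_Task, PowerOffVM_Task and DeleteDatastoreFile_Task entries above all follow the same oslo.vmware pattern: a vSphere task is started through the API session and then polled in wait_for_task, which emits the "Task: {...} progress is N%" and "completed successfully" lines seen here. The following is a minimal sketch of that pattern only; the connection values and the managed-object id are hypothetical and not taken from this log.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Hypothetical connection values; in Nova these come from the [vmware]
    # section of nova.conf.
    session = vmware_api.VMwareAPISession(
        host='vc1.example.test',
        server_username='admin',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=0.5)

    # Hypothetical VirtualMachine managed-object id.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Start the vSphere task, then block while it is polled to completion;
    # wait_for_task is what produces the progress/"completed successfully"
    # DEBUG lines in the log above.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
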
[ 1164.087042] env[62204]: DEBUG oslo.service.loopingcall [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1164.087248] env[62204]: DEBUG nova.compute.manager [-] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1164.087343] env[62204]: DEBUG nova.network.neutron [-] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1164.226985] env[62204]: DEBUG oslo_concurrency.lockutils [None req-cf680034-75a8-491e-b101-cde75fa33b4e tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 18.300s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.716361] env[62204]: DEBUG nova.compute.manager [req-f062f1ea-5918-48c1-8880-0a7ccf58476b req-745e526a-04b1-4626-9b7c-5ea358098c04 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Received event network-vif-deleted-e991eae7-de66-4f1d-94f0-a1917eeaa5f1 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1164.716693] env[62204]: INFO nova.compute.manager [req-f062f1ea-5918-48c1-8880-0a7ccf58476b req-745e526a-04b1-4626-9b7c-5ea358098c04 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Neutron deleted interface e991eae7-de66-4f1d-94f0-a1917eeaa5f1; detaching it from the instance and deleting it from the info cache [ 1164.716984] env[62204]: DEBUG nova.network.neutron [req-f062f1ea-5918-48c1-8880-0a7ccf58476b req-745e526a-04b1-4626-9b7c-5ea358098c04 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.189538] env[62204]: DEBUG nova.network.neutron [-] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.220111] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aac76691-59b0-4091-a973-2779a80e3385 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.229304] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03ec4a2-6a15-46f6-81ac-1f7d52d4450e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.252959] env[62204]: DEBUG nova.compute.manager [req-f062f1ea-5918-48c1-8880-0a7ccf58476b req-745e526a-04b1-4626-9b7c-5ea358098c04 service nova] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Detach interface failed, port_id=e991eae7-de66-4f1d-94f0-a1917eeaa5f1, reason: Instance 3b803bcb-3d18-455e-b2fa-40fe074dfdbb could not be found. 
{{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1165.616183] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "18cad772-c6f8-4797-8c03-86321ee62958" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.616491] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.616712] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "18cad772-c6f8-4797-8c03-86321ee62958-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1165.616909] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1165.617091] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1165.619434] env[62204]: INFO nova.compute.manager [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Terminating instance [ 1165.621272] env[62204]: DEBUG nova.compute.manager [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Start destroying the instance on the hypervisor. 
{{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1165.621468] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1165.622311] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d1643f-401a-4c75-a5fd-e425cc6c89d4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.630893] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.631417] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1dc27024-9755-47f1-8562-b99d6e9fc07d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.636613] env[62204]: DEBUG oslo_vmware.api [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1165.636613] env[62204]: value = "task-1200534" [ 1165.636613] env[62204]: _type = "Task" [ 1165.636613] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.643831] env[62204]: DEBUG oslo_vmware.api [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200534, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.692614] env[62204]: INFO nova.compute.manager [-] [instance: 3b803bcb-3d18-455e-b2fa-40fe074dfdbb] Took 1.61 seconds to deallocate network for instance. [ 1166.147537] env[62204]: DEBUG oslo_vmware.api [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200534, 'name': PowerOffVM_Task, 'duration_secs': 0.178877} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.147873] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1166.148061] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1166.148330] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d3f74e0a-d735-41aa-ae43-5aabda91f151 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.199419] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.199678] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.199943] env[62204]: DEBUG nova.objects.instance [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lazy-loading 'resources' on Instance uuid 3b803bcb-3d18-455e-b2fa-40fe074dfdbb {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.217402] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1166.217662] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1166.217856] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleting the datastore file [datastore1] 18cad772-c6f8-4797-8c03-86321ee62958 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1166.218171] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-916f5d42-4c8a-4960-b970-fde863c4e16d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.225255] env[62204]: DEBUG oslo_vmware.api [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for the task: (returnval){ [ 1166.225255] env[62204]: value = "task-1200536" [ 1166.225255] env[62204]: _type = "Task" [ 1166.225255] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.233288] env[62204]: DEBUG oslo_vmware.api [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.734516] env[62204]: DEBUG oslo_vmware.api [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Task: {'id': task-1200536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129157} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.736895] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1166.737107] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1166.737293] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1166.737469] env[62204]: INFO nova.compute.manager [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1166.737741] env[62204]: DEBUG oslo.service.loopingcall [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1166.738805] env[62204]: DEBUG nova.compute.manager [-] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1166.738805] env[62204]: DEBUG nova.network.neutron [-] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1166.757183] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54882b1-e9a1-4755-8400-fb7688b23051 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.764104] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a060c24c-5ab1-444b-a82b-e80b186929ef {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.792931] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ab0eb1-aab2-4778-8190-19908a31c569 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.799649] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604f08e4-4889-4807-a07c-3e892f6fa98b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.814773] env[62204]: DEBUG nova.compute.provider_tree [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.184472] env[62204]: DEBUG nova.compute.manager [req-09673fd1-f537-4ea4-88d1-d7f78764c96a req-334fbee1-271b-46d8-927d-a56f95d93b93 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Received event network-vif-deleted-68464bf7-61a8-4b7f-bbd7-a546e9e3f17d {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1167.184741] env[62204]: INFO nova.compute.manager [req-09673fd1-f537-4ea4-88d1-d7f78764c96a req-334fbee1-271b-46d8-927d-a56f95d93b93 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Neutron deleted interface 68464bf7-61a8-4b7f-bbd7-a546e9e3f17d; detaching it from the instance and deleting it from the info cache [ 1167.184941] env[62204]: DEBUG nova.network.neutron [req-09673fd1-f537-4ea4-88d1-d7f78764c96a req-334fbee1-271b-46d8-927d-a56f95d93b93 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.317478] env[62204]: DEBUG nova.scheduler.client.report [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1167.660904] env[62204]: DEBUG nova.network.neutron [-] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.687866] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79adb6ca-41c2-4c01-8ce9-707c46b194b5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.698125] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc34f796-c272-467a-a9a9-f6bd6d1bbe3b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.721595] env[62204]: DEBUG nova.compute.manager [req-09673fd1-f537-4ea4-88d1-d7f78764c96a req-334fbee1-271b-46d8-927d-a56f95d93b93 service nova] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Detach interface failed, port_id=68464bf7-61a8-4b7f-bbd7-a546e9e3f17d, reason: Instance 18cad772-c6f8-4797-8c03-86321ee62958 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1167.822880] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.623s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.846852] env[62204]: INFO nova.scheduler.client.report [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Deleted allocations for instance 3b803bcb-3d18-455e-b2fa-40fe074dfdbb [ 1168.163539] env[62204]: INFO nova.compute.manager [-] [instance: 18cad772-c6f8-4797-8c03-86321ee62958] Took 1.43 seconds to deallocate network for instance. 
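Annotation: the recurring Acquiring lock / acquired / "released" lines (for "compute_resources", the per-instance UUID locks and the refresh_cache-* names) are emitted by oslo.concurrency's lockutils wrappers around the named critical sections. A small sketch of the decorator form that produces this DEBUG output follows; the prefix and the decorated function are stand-ins, not the actual resource-tracker code.

    from oslo_concurrency import lockutils

    # Nova-style lock prefix; the decorated callable's qualified name is what
    # appears after 'by "..."' in the lock DEBUG lines.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_usage_example(instance_uuid):
        # Hypothetical critical section: this body is serialized with any other
        # holder of the "compute_resources" lock, and lockutils logs the
        # 'acquired ... :: waited Ns' and '"released" ... :: held Ns' lines
        # around it.
        return instance_uuid
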
[ 1168.354620] env[62204]: DEBUG oslo_concurrency.lockutils [None req-d0f1c889-d4b8-4380-8cd1-5194896b219c tempest-AttachVolumeNegativeTest-305032227 tempest-AttachVolumeNegativeTest-305032227-project-member] Lock "3b803bcb-3d18-455e-b2fa-40fe074dfdbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.383s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1168.670397] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1168.670740] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1168.671045] env[62204]: DEBUG nova.objects.instance [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lazy-loading 'resources' on Instance uuid 18cad772-c6f8-4797-8c03-86321ee62958 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1169.218996] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15463f8c-698b-41fb-b897-0c53a8b1faf7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.228427] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0995b0c-d6d6-4a5b-b2ac-7a689263aea8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.258428] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e27c44d-7414-4f70-a2ae-c7d584365593 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.266062] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c69a37a-dd82-4042-b5c0-69797ba10f62 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.278831] env[62204]: DEBUG nova.compute.provider_tree [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.782130] env[62204]: DEBUG nova.scheduler.client.report [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1170.286655] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.616s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1170.304097] env[62204]: INFO nova.scheduler.client.report [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Deleted allocations for instance 18cad772-c6f8-4797-8c03-86321ee62958 [ 1170.811085] env[62204]: DEBUG oslo_concurrency.lockutils [None req-9bf3f6d9-3cc5-49aa-8df9-b705954234dd tempest-ServerActionsTestOtherB-1539555713 tempest-ServerActionsTestOtherB-1539555713-project-member] Lock "18cad772-c6f8-4797-8c03-86321ee62958" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.194s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1178.346852] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1178.722501] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1178.725019] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1178.725019] env[62204]: DEBUG nova.compute.manager [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1178.725019] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3f0e7c-2a2f-480a-a80c-b4181839eb52 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.737609] env[62204]: DEBUG nova.compute.manager [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 
471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62204) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1178.737609] env[62204]: DEBUG nova.objects.instance [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'flavor' on Instance uuid 471b4b93-a4b7-4b1c-8559-24a8db15b1b7 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.248727] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1179.248727] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba44fc92-f3dc-4575-bbdc-d62d8d2eb767 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.255761] env[62204]: DEBUG oslo_vmware.api [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1179.255761] env[62204]: value = "task-1200541" [ 1179.255761] env[62204]: _type = "Task" [ 1179.255761] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.262468] env[62204]: DEBUG oslo_vmware.api [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200541, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.346674] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1179.346864] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Starting heal instance info cache {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1179.347111] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Rebuilding the list of instances to heal {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1179.655877] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquiring lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1179.656144] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1179.763599] env[62204]: DEBUG oslo_vmware.api [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200541, 'name': PowerOffVM_Task, 'duration_secs': 0.207437} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.763862] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1179.764058] env[62204]: DEBUG nova.compute.manager [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1179.764806] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25cd61b-baaf-438b-869f-f9ea72f754ed {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.850190] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1179.850336] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquired lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.850492] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Forcefully refreshing network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1179.850637] env[62204]: DEBUG nova.objects.instance [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lazy-loading 'info_cache' on Instance uuid 471b4b93-a4b7-4b1c-8559-24a8db15b1b7 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.158868] env[62204]: DEBUG nova.compute.manager [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Starting instance... 
{{(pid=62204) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1180.276451] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2ece296d-6ffb-4092-966d-406fed82a29e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.554s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1180.683271] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1180.683271] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1180.685035] env[62204]: INFO nova.compute.claims [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1181.199599] env[62204]: DEBUG nova.objects.instance [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'flavor' on Instance uuid 471b4b93-a4b7-4b1c-8559-24a8db15b1b7 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.600655] env[62204]: DEBUG nova.network.neutron [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updating instance_info_cache with network_info: [{"id": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "address": "fa:16:3e:ca:ef:72", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb9ecbdd-94", "ovs_interfaceid": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.703977] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1181.730037] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d25665-72e6-4524-b43e-aa799a93ef2a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.737824] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29927a29-ff87-4607-86de-319472219ca1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.767775] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27db396-c93f-4fe3-a293-734b9096a00e {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.776051] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57afb988-8945-4096-b532-5c70d178dd90 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.789194] env[62204]: DEBUG nova.compute.provider_tree [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.103080] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Releasing lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1182.103329] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updated the network info_cache for instance {{(pid=62204) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1182.103611] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.103786] env[62204]: DEBUG nova.network.neutron [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1182.103974] env[62204]: DEBUG nova.objects.instance [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] 
Lazy-loading 'info_cache' on Instance uuid 471b4b93-a4b7-4b1c-8559-24a8db15b1b7 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1182.105351] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.106058] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.106058] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.106214] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.106261] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.106388] env[62204]: DEBUG nova.compute.manager [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62204) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1182.106531] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1182.292344] env[62204]: DEBUG nova.scheduler.client.report [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1182.607629] env[62204]: DEBUG nova.objects.base [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Object Instance<471b4b93-a4b7-4b1c-8559-24a8db15b1b7> lazy-loaded attributes: flavor,info_cache {{(pid=62204) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1182.609393] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.797374] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.114s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.797917] env[62204]: DEBUG nova.compute.manager [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Start building networks asynchronously for instance. 
{{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1182.800419] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.191s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1182.800603] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1182.800755] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62204) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1182.801615] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ed99d3-7e63-4885-a2ff-2a1b838c0778 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.810625] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc60a14-5d9c-4a47-8931-44a30691fe72 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.824310] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e8d80e-2f11-42b5-8cfa-6ccea3fb4b4a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.830635] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27be4e9d-1efd-403e-a0f5-cf89094385b7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.859433] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180676MB free_disk=156GB free_vcpus=48 pci_devices=None {{(pid=62204) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1182.859553] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1182.859728] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1183.306699] env[62204]: DEBUG nova.compute.utils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Using /dev/sd instead of None {{(pid=62204) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 1183.309050] env[62204]: DEBUG nova.compute.manager [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Allocating IP information in the background. {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1183.309050] env[62204]: DEBUG nova.network.neutron [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] allocate_for_instance() {{(pid=62204) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1183.313490] env[62204]: DEBUG nova.network.neutron [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updating instance_info_cache with network_info: [{"id": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "address": "fa:16:3e:ca:ef:72", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb9ecbdd-94", "ovs_interfaceid": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.344229] env[62204]: DEBUG nova.policy [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3acca8347e044d384cbd7c64e984151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6ae22f9498b47508e86b56d2cb5180d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62204) authorize /opt/stack/nova/nova/policy.py:201}} [ 1183.571738] env[62204]: DEBUG nova.network.neutron [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Successfully created port: 
acee56df-3294-444a-836f-70cb44ed2d52 {{(pid=62204) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1183.811255] env[62204]: DEBUG nova.compute.manager [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Start building block device mappings for instance. {{(pid=62204) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1183.815979] env[62204]: DEBUG oslo_concurrency.lockutils [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1183.879421] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 471b4b93-a4b7-4b1c-8559-24a8db15b1b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1183.879612] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Instance 30dac80e-e4d5-47e8-88d3-deb0933dd28b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62204) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1183.879828] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1183.880014] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62204) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1183.913675] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cc06a9-02bf-4e7f-a957-29b405d86fba {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.921128] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbea25ea-4ef0-4993-b816-22100b96d587 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.950130] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abe241f-d730-49b5-997d-64ad94f643ae {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.957148] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e905c8-5616-4419-82a9-3cc25a8d6daa {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.969700] env[62204]: DEBUG nova.compute.provider_tree [None 
req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.319733] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1184.320042] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63fb3d1e-ded8-40ec-8852-41c67c6f972a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.327555] env[62204]: DEBUG oslo_vmware.api [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1184.327555] env[62204]: value = "task-1200542" [ 1184.327555] env[62204]: _type = "Task" [ 1184.327555] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.337944] env[62204]: DEBUG oslo_vmware.api [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200542, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.472175] env[62204]: DEBUG nova.scheduler.client.report [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1184.823422] env[62204]: DEBUG nova.compute.manager [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Start spawning the instance on the hypervisor. {{(pid=62204) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1184.837610] env[62204]: DEBUG oslo_vmware.api [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200542, 'name': PowerOnVM_Task, 'duration_secs': 0.37973} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.837954] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1184.838159] env[62204]: DEBUG nova.compute.manager [None req-7ef81a48-d8c9-4668-8b57-93feeb4d80dd tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1184.838942] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae579fe-1319-4cb5-a98b-5e64e3ba12fe {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.851177] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-08T23:34:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-08T23:34:15Z,direct_url=,disk_format='vmdk',id=c0e4d3a1-f965-49e2-ab05-fbf425872dcc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='2fe8616669064856bebe874898c69d6a',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-08T23:34:16Z,virtual_size=,visibility=), allow threads: False {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1184.851418] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Flavor limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1184.851578] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Image limits 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1184.851760] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Flavor pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1184.851910] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Image pref 0:0:0 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1184.852071] env[62204]: DEBUG nova.virt.hardware [None 
req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62204) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1184.852280] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1184.852439] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1184.852603] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Got 1 possible topologies {{(pid=62204) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1184.852766] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1184.852939] env[62204]: DEBUG nova.virt.hardware [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62204) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1184.853698] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79859182-f92d-4143-9b1e-117d66cb4080 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.861009] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b19eab9-9ef8-418b-b214-79f42b042e0c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.955646] env[62204]: DEBUG nova.compute.manager [req-ae0c33c4-6780-4ebd-9a7f-cb80433de238 req-a7dc936a-59f8-421c-a069-790863d97be9 service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Received event network-vif-plugged-acee56df-3294-444a-836f-70cb44ed2d52 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1184.955911] env[62204]: DEBUG oslo_concurrency.lockutils [req-ae0c33c4-6780-4ebd-9a7f-cb80433de238 req-a7dc936a-59f8-421c-a069-790863d97be9 service nova] Acquiring lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1184.956157] 
env[62204]: DEBUG oslo_concurrency.lockutils [req-ae0c33c4-6780-4ebd-9a7f-cb80433de238 req-a7dc936a-59f8-421c-a069-790863d97be9 service nova] Lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1184.956334] env[62204]: DEBUG oslo_concurrency.lockutils [req-ae0c33c4-6780-4ebd-9a7f-cb80433de238 req-a7dc936a-59f8-421c-a069-790863d97be9 service nova] Lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1184.956500] env[62204]: DEBUG nova.compute.manager [req-ae0c33c4-6780-4ebd-9a7f-cb80433de238 req-a7dc936a-59f8-421c-a069-790863d97be9 service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] No waiting events found dispatching network-vif-plugged-acee56df-3294-444a-836f-70cb44ed2d52 {{(pid=62204) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1184.956667] env[62204]: WARNING nova.compute.manager [req-ae0c33c4-6780-4ebd-9a7f-cb80433de238 req-a7dc936a-59f8-421c-a069-790863d97be9 service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Received unexpected event network-vif-plugged-acee56df-3294-444a-836f-70cb44ed2d52 for instance with vm_state building and task_state spawning. [ 1184.977184] env[62204]: DEBUG nova.compute.resource_tracker [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62204) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1184.977317] env[62204]: DEBUG oslo_concurrency.lockutils [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.117s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1185.030649] env[62204]: DEBUG nova.network.neutron [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Successfully updated port: acee56df-3294-444a-836f-70cb44ed2d52 {{(pid=62204) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1185.533332] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquiring lock "refresh_cache-30dac80e-e4d5-47e8-88d3-deb0933dd28b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1185.533441] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquired lock "refresh_cache-30dac80e-e4d5-47e8-88d3-deb0933dd28b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.533604] env[62204]: DEBUG nova.network.neutron [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 
tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1186.064352] env[62204]: DEBUG nova.network.neutron [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Instance cache missing network info. {{(pid=62204) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1186.189038] env[62204]: DEBUG nova.network.neutron [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Updating instance_info_cache with network_info: [{"id": "acee56df-3294-444a-836f-70cb44ed2d52", "address": "fa:16:3e:05:eb:82", "network": {"id": "d13f7c84-2ce4-471c-a7e7-d62e6362fac6", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-850914584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6ae22f9498b47508e86b56d2cb5180d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacee56df-32", "ovs_interfaceid": "acee56df-3294-444a-836f-70cb44ed2d52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.229456] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0c3cd2-2d6a-4ddf-8114-7f9379459a92 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.236700] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8568b51a-db72-432d-a30b-0bedc6d7513e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Suspending the VM {{(pid=62204) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1186.236967] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-b1f5d3f1-d9d6-4e17-a935-6971177bceff {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.243523] env[62204]: DEBUG oslo_vmware.api [None req-8568b51a-db72-432d-a30b-0bedc6d7513e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1186.243523] env[62204]: value = "task-1200543" [ 1186.243523] env[62204]: _type = "Task" [ 1186.243523] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.252422] env[62204]: DEBUG oslo_vmware.api [None req-8568b51a-db72-432d-a30b-0bedc6d7513e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200543, 'name': SuspendVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.691995] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Releasing lock "refresh_cache-30dac80e-e4d5-47e8-88d3-deb0933dd28b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1186.692528] env[62204]: DEBUG nova.compute.manager [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Instance network_info: |[{"id": "acee56df-3294-444a-836f-70cb44ed2d52", "address": "fa:16:3e:05:eb:82", "network": {"id": "d13f7c84-2ce4-471c-a7e7-d62e6362fac6", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-850914584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6ae22f9498b47508e86b56d2cb5180d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacee56df-32", "ovs_interfaceid": "acee56df-3294-444a-836f-70cb44ed2d52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62204) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1186.693161] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:eb:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90328c7b-15c4-4742-805b-755248d67029', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'acee56df-3294-444a-836f-70cb44ed2d52', 'vif_model': 'vmxnet3'}] {{(pid=62204) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1186.701279] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Creating folder: Project (c6ae22f9498b47508e86b56d2cb5180d). Parent ref: group-v259933. 
{{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1186.701557] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dde9a8e4-78d8-4b5d-a54f-c1d8228df5cb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.714092] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Created folder: Project (c6ae22f9498b47508e86b56d2cb5180d) in parent group-v259933. [ 1186.714304] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Creating folder: Instances. Parent ref: group-v260156. {{(pid=62204) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1186.714575] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb041267-d4c5-4c7b-bf6a-8a3b59705f11 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.724263] env[62204]: INFO nova.virt.vmwareapi.vm_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Created folder: Instances in parent group-v260156. [ 1186.724481] env[62204]: DEBUG oslo.service.loopingcall [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1186.724667] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Creating VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1186.724860] env[62204]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88eaf3c1-7d77-40ec-946b-91150f302cb4 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.743556] env[62204]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1186.743556] env[62204]: value = "task-1200546" [ 1186.743556] env[62204]: _type = "Task" [ 1186.743556] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.754134] env[62204]: DEBUG oslo_vmware.api [None req-8568b51a-db72-432d-a30b-0bedc6d7513e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200543, 'name': SuspendVM_Task} progress is 75%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.756986] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200546, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.986314] env[62204]: DEBUG nova.compute.manager [req-1a23bb0a-7ddd-414b-8c82-551b7ae45a83 req-10c3a987-0b3f-4991-912d-2647026f2f0c service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Received event network-changed-acee56df-3294-444a-836f-70cb44ed2d52 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1186.986527] env[62204]: DEBUG nova.compute.manager [req-1a23bb0a-7ddd-414b-8c82-551b7ae45a83 req-10c3a987-0b3f-4991-912d-2647026f2f0c service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Refreshing instance network info cache due to event network-changed-acee56df-3294-444a-836f-70cb44ed2d52. {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1186.986747] env[62204]: DEBUG oslo_concurrency.lockutils [req-1a23bb0a-7ddd-414b-8c82-551b7ae45a83 req-10c3a987-0b3f-4991-912d-2647026f2f0c service nova] Acquiring lock "refresh_cache-30dac80e-e4d5-47e8-88d3-deb0933dd28b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1186.986897] env[62204]: DEBUG oslo_concurrency.lockutils [req-1a23bb0a-7ddd-414b-8c82-551b7ae45a83 req-10c3a987-0b3f-4991-912d-2647026f2f0c service nova] Acquired lock "refresh_cache-30dac80e-e4d5-47e8-88d3-deb0933dd28b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.987071] env[62204]: DEBUG nova.network.neutron [req-1a23bb0a-7ddd-414b-8c82-551b7ae45a83 req-10c3a987-0b3f-4991-912d-2647026f2f0c service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Refreshing network info cache for port acee56df-3294-444a-836f-70cb44ed2d52 {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1187.256493] env[62204]: DEBUG oslo_vmware.api [-] Task: {'id': task-1200546, 'name': CreateVM_Task, 'duration_secs': 0.308722} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.259273] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Created VM on the ESX host {{(pid=62204) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1187.259622] env[62204]: DEBUG oslo_vmware.api [None req-8568b51a-db72-432d-a30b-0bedc6d7513e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200543, 'name': SuspendVM_Task, 'duration_secs': 0.561981} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.260285] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.260437] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.260756] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1187.261029] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-8568b51a-db72-432d-a30b-0bedc6d7513e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Suspended the VM {{(pid=62204) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1187.261214] env[62204]: DEBUG nova.compute.manager [None req-8568b51a-db72-432d-a30b-0bedc6d7513e tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1187.261466] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e796d900-3804-4c43-9b40-b074043cab23 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.263346] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ac4502-651c-4005-8c77-e0401f22dccf {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.268333] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1187.268333] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522e533c-0c42-118b-5c94-d91e259ad818" [ 1187.268333] env[62204]: _type = "Task" [ 1187.268333] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.279462] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]522e533c-0c42-118b-5c94-d91e259ad818, 'name': SearchDatastore_Task} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.280397] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1187.280397] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Processing image c0e4d3a1-f965-49e2-ab05-fbf425872dcc {{(pid=62204) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1187.280397] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1187.280397] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquired lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.280615] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1187.280736] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-905e460b-2218-448b-beeb-3de5bc7c484c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.288055] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62204) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1187.288055] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 
tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62204) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1187.288538] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f561854-eb12-40a7-a3a0-0da52cf8381c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.293052] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1187.293052] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52054b1e-134e-acb8-ff2b-03023e2896de" [ 1187.293052] env[62204]: _type = "Task" [ 1187.293052] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.300344] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52054b1e-134e-acb8-ff2b-03023e2896de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.674672] env[62204]: DEBUG nova.network.neutron [req-1a23bb0a-7ddd-414b-8c82-551b7ae45a83 req-10c3a987-0b3f-4991-912d-2647026f2f0c service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Updated VIF entry in instance network info cache for port acee56df-3294-444a-836f-70cb44ed2d52. 
{{(pid=62204) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1187.675120] env[62204]: DEBUG nova.network.neutron [req-1a23bb0a-7ddd-414b-8c82-551b7ae45a83 req-10c3a987-0b3f-4991-912d-2647026f2f0c service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Updating instance_info_cache with network_info: [{"id": "acee56df-3294-444a-836f-70cb44ed2d52", "address": "fa:16:3e:05:eb:82", "network": {"id": "d13f7c84-2ce4-471c-a7e7-d62e6362fac6", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-850914584-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6ae22f9498b47508e86b56d2cb5180d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90328c7b-15c4-4742-805b-755248d67029", "external-id": "nsx-vlan-transportzone-860", "segmentation_id": 860, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapacee56df-32", "ovs_interfaceid": "acee56df-3294-444a-836f-70cb44ed2d52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.803711] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52054b1e-134e-acb8-ff2b-03023e2896de, 'name': SearchDatastore_Task, 'duration_secs': 0.007335} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.804612] env[62204]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74cd1b0b-b0f1-477e-b223-714a88d22f41 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.810350] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1187.810350] env[62204]: value = "session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52773c20-a5bc-f96a-fb97-2db9373c5b58" [ 1187.810350] env[62204]: _type = "Task" [ 1187.810350] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.819166] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52773c20-a5bc-f96a-fb97-2db9373c5b58, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.178171] env[62204]: DEBUG oslo_concurrency.lockutils [req-1a23bb0a-7ddd-414b-8c82-551b7ae45a83 req-10c3a987-0b3f-4991-912d-2647026f2f0c service nova] Releasing lock "refresh_cache-30dac80e-e4d5-47e8-88d3-deb0933dd28b" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.320158] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': session[52f05f04-9707-b6a7-a7ad-d0b196dee5ba]52773c20-a5bc-f96a-fb97-2db9373c5b58, 'name': SearchDatastore_Task, 'duration_secs': 0.009297} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.320513] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Releasing lock "[datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1188.320641] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 30dac80e-e4d5-47e8-88d3-deb0933dd28b/30dac80e-e4d5-47e8-88d3-deb0933dd28b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1188.320891] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a122a851-9e4e-4394-9be1-80f767012b3c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.327956] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1188.327956] env[62204]: value = "task-1200547" [ 1188.327956] env[62204]: _type = "Task" [ 1188.327956] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.335425] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200547, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.583896] env[62204]: INFO nova.compute.manager [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Resuming [ 1188.584542] env[62204]: DEBUG nova.objects.instance [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'flavor' on Instance uuid 471b4b93-a4b7-4b1c-8559-24a8db15b1b7 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1188.837960] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200547, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445436} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.838224] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/c0e4d3a1-f965-49e2-ab05-fbf425872dcc/c0e4d3a1-f965-49e2-ab05-fbf425872dcc.vmdk to [datastore1] 30dac80e-e4d5-47e8-88d3-deb0933dd28b/30dac80e-e4d5-47e8-88d3-deb0933dd28b.vmdk {{(pid=62204) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1188.838492] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Extending root virtual disk to 1048576 {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1188.838786] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c08b0c4f-5b46-4efe-876c-6b8812e8a2cd {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.846138] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1188.846138] env[62204]: value = "task-1200548" [ 1188.846138] env[62204]: _type = "Task" [ 1188.846138] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.853939] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200548, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.356239] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200548, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059645} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.356534] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Extended root virtual disk {{(pid=62204) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1189.357256] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e25b73c-e7f0-4652-8613-5b2695660015 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.379288] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 30dac80e-e4d5-47e8-88d3-deb0933dd28b/30dac80e-e4d5-47e8-88d3-deb0933dd28b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.379758] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98f51972-61a9-45d5-8710-7b8847afb4c5 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.398907] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1189.398907] env[62204]: value = "task-1200549" [ 1189.398907] env[62204]: _type = "Task" [ 1189.398907] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.406494] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200549, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.593189] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1189.593428] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquired lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.593696] env[62204]: DEBUG nova.network.neutron [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Building network info cache for instance {{(pid=62204) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1189.910722] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200549, 'name': ReconfigVM_Task, 'duration_secs': 0.277871} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.910960] env[62204]: DEBUG nova.virt.vmwareapi.volumeops [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 30dac80e-e4d5-47e8-88d3-deb0933dd28b/30dac80e-e4d5-47e8-88d3-deb0933dd28b.vmdk or device None with type sparse {{(pid=62204) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.911565] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-993b31b4-3ab8-4708-ad86-27631c5a205f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.918481] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1189.918481] env[62204]: value = "task-1200550" [ 1189.918481] env[62204]: _type = "Task" [ 1189.918481] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.926995] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200550, 'name': Rename_Task} progress is 5%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.972853] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1189.973047] env[62204]: DEBUG oslo_service.periodic_task [None req-2f0cd039-04f3-4d8c-9185-3119de8b9553 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62204) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1190.297155] env[62204]: DEBUG nova.network.neutron [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updating instance_info_cache with network_info: [{"id": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "address": "fa:16:3e:ca:ef:72", "network": {"id": "2f2d7d86-1892-4559-8e10-8827969b85c8", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-51470566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.210", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ef8dc436e4b45d0a8d50468666358e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb9ecbdd-94", "ovs_interfaceid": "cb9ecbdd-94c6-48fe-acc1-c0721410b962", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.428627] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200550, 'name': Rename_Task, 'duration_secs': 0.138119} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.428930] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Powering on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1190.429172] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57eefcfe-a4e2-4ca6-b020-e54b302d6dd7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.435400] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1190.435400] env[62204]: value = "task-1200551" [ 1190.435400] env[62204]: _type = "Task" [ 1190.435400] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.442393] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200551, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.799955] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Releasing lock "refresh_cache-471b4b93-a4b7-4b1c-8559-24a8db15b1b7" {{(pid=62204) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1190.800921] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdbb9c09-145b-48e8-8d0f-07a5414cb21f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.807512] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Resuming the VM {{(pid=62204) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1190.807803] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d68fd70-b441-488f-9b44-a8532b5a6ba1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.814082] env[62204]: DEBUG oslo_vmware.api [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1190.814082] env[62204]: value = "task-1200552" [ 1190.814082] env[62204]: _type = "Task" [ 1190.814082] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.821936] env[62204]: DEBUG oslo_vmware.api [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.945337] env[62204]: DEBUG oslo_vmware.api [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200551, 'name': PowerOnVM_Task, 'duration_secs': 0.464592} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.945645] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Powered on the VM {{(pid=62204) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1190.945892] env[62204]: INFO nova.compute.manager [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Took 6.12 seconds to spawn the instance on the hypervisor. [ 1190.946118] env[62204]: DEBUG nova.compute.manager [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1190.946901] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba556171-be60-40b6-bade-e208980a448c {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.324635] env[62204]: DEBUG oslo_vmware.api [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200552, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.463406] env[62204]: INFO nova.compute.manager [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Took 10.80 seconds to build instance. [ 1191.824488] env[62204]: DEBUG oslo_vmware.api [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200552, 'name': PowerOnVM_Task, 'duration_secs': 0.514392} completed successfully. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.824873] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Resumed the VM {{(pid=62204) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1191.825202] env[62204]: DEBUG nova.compute.manager [None req-c949902f-7c09-4211-8e17-8f587fbeaeaa tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Checking state {{(pid=62204) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1191.826062] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00abeeb-10fc-4939-81dd-91b625f7f94a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.966150] env[62204]: DEBUG oslo_concurrency.lockutils [None req-0c2a36e9-8310-4b97-acf2-6d94c61590a9 tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.310s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.152026] env[62204]: DEBUG oslo_concurrency.lockutils [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquiring lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.152319] env[62204]: DEBUG oslo_concurrency.lockutils [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.152536] env[62204]: DEBUG oslo_concurrency.lockutils [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquiring lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1192.152739] env[62204]: DEBUG oslo_concurrency.lockutils [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1192.152915] env[62204]: DEBUG 
oslo_concurrency.lockutils [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1192.155119] env[62204]: INFO nova.compute.manager [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Terminating instance [ 1192.156879] env[62204]: DEBUG nova.compute.manager [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1192.157089] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1192.157904] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64657156-42d4-4acf-a1f6-2cb31a74a9e3 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.165572] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1192.165800] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb8557c4-9e12-4620-9cd4-925d7c1408e6 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.172018] env[62204]: DEBUG oslo_vmware.api [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1192.172018] env[62204]: value = "task-1200553" [ 1192.172018] env[62204]: _type = "Task" [ 1192.172018] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.179280] env[62204]: DEBUG oslo_vmware.api [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200553, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.683243] env[62204]: DEBUG oslo_vmware.api [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200553, 'name': PowerOffVM_Task, 'duration_secs': 0.316474} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.683535] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1192.683698] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1192.683953] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-465ad482-3789-42bd-bfc6-772fa4d9608a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.745633] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1192.745912] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Deleting contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1192.746129] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Deleting the datastore file [datastore1] 30dac80e-e4d5-47e8-88d3-deb0933dd28b {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1192.746398] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4d2a9e1-8318-4692-ab7b-6a13ec85fb07 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.752647] env[62204]: DEBUG oslo_vmware.api [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for the task: (returnval){ [ 1192.752647] env[62204]: value = "task-1200555" [ 1192.752647] env[62204]: _type = "Task" [ 1192.752647] env[62204]: } to complete. 
{{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.760242] env[62204]: DEBUG oslo_vmware.api [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200555, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.262998] env[62204]: DEBUG oslo_vmware.api [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Task: {'id': task-1200555, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256735} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.263237] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1193.263426] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Deleted contents of the VM from datastore datastore1 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1193.263607] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1193.263784] env[62204]: INFO nova.compute.manager [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1193.264039] env[62204]: DEBUG oslo.service.loopingcall [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1193.264243] env[62204]: DEBUG nova.compute.manager [-] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1193.264337] env[62204]: DEBUG nova.network.neutron [-] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1193.485125] env[62204]: DEBUG nova.compute.manager [req-7efc44af-c539-4d7a-92b7-1364a5917804 req-5b7595ee-bf8b-47a6-ba4a-df62d83a0ab3 service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Received event network-vif-deleted-acee56df-3294-444a-836f-70cb44ed2d52 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1193.485125] env[62204]: INFO nova.compute.manager [req-7efc44af-c539-4d7a-92b7-1364a5917804 req-5b7595ee-bf8b-47a6-ba4a-df62d83a0ab3 service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Neutron deleted interface acee56df-3294-444a-836f-70cb44ed2d52; detaching it from the instance and deleting it from the info cache [ 1193.485125] env[62204]: DEBUG nova.network.neutron [req-7efc44af-c539-4d7a-92b7-1364a5917804 req-5b7595ee-bf8b-47a6-ba4a-df62d83a0ab3 service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.542966] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.543198] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.543400] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.543582] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.543750] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 
tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.545703] env[62204]: INFO nova.compute.manager [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Terminating instance [ 1193.547392] env[62204]: DEBUG nova.compute.manager [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Start destroying the instance on the hypervisor. {{(pid=62204) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1193.547587] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Destroying instance {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1193.548422] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004eca88-956f-4c67-aae9-ddc410e28600 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.556180] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Powering off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.556396] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92a14aee-3db5-4a03-99e4-f1cceaf62f60 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.562213] env[62204]: DEBUG oslo_vmware.api [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1193.562213] env[62204]: value = "task-1200556" [ 1193.562213] env[62204]: _type = "Task" [ 1193.562213] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.569696] env[62204]: DEBUG oslo_vmware.api [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200556, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.964359] env[62204]: DEBUG nova.network.neutron [-] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.987120] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7481b2d-0801-46d7-afdd-000ea128786a {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.995923] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bc4385-5dd6-4f30-8c5f-10541447cf43 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.019756] env[62204]: DEBUG nova.compute.manager [req-7efc44af-c539-4d7a-92b7-1364a5917804 req-5b7595ee-bf8b-47a6-ba4a-df62d83a0ab3 service nova] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Detach interface failed, port_id=acee56df-3294-444a-836f-70cb44ed2d52, reason: Instance 30dac80e-e4d5-47e8-88d3-deb0933dd28b could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1194.071693] env[62204]: DEBUG oslo_vmware.api [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200556, 'name': PowerOffVM_Task, 'duration_secs': 0.189976} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.071874] env[62204]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Powered off the VM {{(pid=62204) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.072091] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Unregistering the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1194.072427] env[62204]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3e29ad5-890b-4057-bf71-0e8171ea33eb {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.135141] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Unregistered the VM {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1194.135395] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Deleting contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1194.135596] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleting the datastore file [datastore2] 471b4b93-a4b7-4b1c-8559-24a8db15b1b7 {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1194.135919] env[62204]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c59bd05-2417-45d4-93b3-5672cda504e8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.142423] env[62204]: DEBUG oslo_vmware.api [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for the task: (returnval){ [ 1194.142423] env[62204]: value = "task-1200558" [ 1194.142423] env[62204]: _type = "Task" [ 1194.142423] env[62204]: } to complete. {{(pid=62204) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.149749] env[62204]: DEBUG oslo_vmware.api [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200558, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.467368] env[62204]: INFO nova.compute.manager [-] [instance: 30dac80e-e4d5-47e8-88d3-deb0933dd28b] Took 1.20 seconds to deallocate network for instance. [ 1194.654074] env[62204]: DEBUG oslo_vmware.api [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Task: {'id': task-1200558, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214879} completed successfully. {{(pid=62204) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.654432] env[62204]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted the datastore file {{(pid=62204) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1194.654720] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Deleted contents of the VM from datastore datastore2 {{(pid=62204) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1194.654964] env[62204]: DEBUG nova.virt.vmwareapi.vmops [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Instance destroyed {{(pid=62204) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1194.655229] env[62204]: INFO nova.compute.manager [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 1194.655556] env[62204]: DEBUG oslo.service.loopingcall [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62204) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1194.655825] env[62204]: DEBUG nova.compute.manager [-] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Deallocating network for instance {{(pid=62204) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1194.655947] env[62204]: DEBUG nova.network.neutron [-] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] deallocate_for_instance() {{(pid=62204) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1194.975067] env[62204]: DEBUG oslo_concurrency.lockutils [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.975067] env[62204]: DEBUG oslo_concurrency.lockutils [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.975067] env[62204]: DEBUG nova.objects.instance [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lazy-loading 'resources' on Instance uuid 30dac80e-e4d5-47e8-88d3-deb0933dd28b {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.509837] env[62204]: DEBUG nova.compute.manager [req-31114c4b-786a-434b-923c-b315327287c4 req-d26211a7-ab65-4bbe-b944-588b70753c44 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Received event network-vif-deleted-cb9ecbdd-94c6-48fe-acc1-c0721410b962 {{(pid=62204) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1195.510134] env[62204]: INFO nova.compute.manager [req-31114c4b-786a-434b-923c-b315327287c4 req-d26211a7-ab65-4bbe-b944-588b70753c44 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Neutron deleted interface cb9ecbdd-94c6-48fe-acc1-c0721410b962; detaching it from the instance and deleting it from the info cache [ 1195.510332] env[62204]: DEBUG nova.network.neutron [req-31114c4b-786a-434b-923c-b315327287c4 req-d26211a7-ab65-4bbe-b944-588b70753c44 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.523949] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc560f1f-2d19-4cdf-81eb-abcb28098335 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.532049] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-f8a5aebd-1ba0-47b1-90e4-cdd299b9425f {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.563614] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f6619d-ce3d-4526-af1f-b83400b930a0 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.570355] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90c39d1-75f0-4bde-ae50-1ec0d340a4e7 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.574058] env[62204]: DEBUG nova.network.neutron [-] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Updating instance_info_cache with network_info: [] {{(pid=62204) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.585446] env[62204]: DEBUG nova.compute.provider_tree [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1196.015224] env[62204]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2987652b-7ce5-4b46-a620-e4de7ea0b84d {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.024350] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f75ee1-63ce-40c7-88ea-0dfb759c4d9b {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.044964] env[62204]: DEBUG nova.compute.manager [req-31114c4b-786a-434b-923c-b315327287c4 req-d26211a7-ab65-4bbe-b944-588b70753c44 service nova] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Detach interface failed, port_id=cb9ecbdd-94c6-48fe-acc1-c0721410b962, reason: Instance 471b4b93-a4b7-4b1c-8559-24a8db15b1b7 could not be found. {{(pid=62204) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1196.077062] env[62204]: INFO nova.compute.manager [-] [instance: 471b4b93-a4b7-4b1c-8559-24a8db15b1b7] Took 1.42 seconds to deallocate network for instance. 
[ 1196.088459] env[62204]: DEBUG nova.scheduler.client.report [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1196.583144] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1196.593046] env[62204]: DEBUG oslo_concurrency.lockutils [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1196.595280] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.012s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1196.595570] env[62204]: DEBUG nova.objects.instance [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lazy-loading 'resources' on Instance uuid 471b4b93-a4b7-4b1c-8559-24a8db15b1b7 {{(pid=62204) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1196.610215] env[62204]: INFO nova.scheduler.client.report [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Deleted allocations for instance 30dac80e-e4d5-47e8-88d3-deb0933dd28b
[ 1197.117115] env[62204]: DEBUG oslo_concurrency.lockutils [None req-be1a6f1f-045e-4387-ad77-783a4b384c4c tempest-ServersNegativeTestMultiTenantJSON-1637580779 tempest-ServersNegativeTestMultiTenantJSON-1637580779-project-member] Lock "30dac80e-e4d5-47e8-88d3-deb0933dd28b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 4.965s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1197.122850] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7329f856-7805-46ae-a340-955053140cc1 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1197.129993] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0137c9d-d3ff-4b20-9b1a-528debc3b484 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
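[Annotation] The inventory dict logged above is what the report client compares against placement. Placement derives schedulable capacity per resource class as (total - reserved) * allocation_ratio, which is why this 48-vCPU host advertises far more allocatable VCPUs than physical cores. A quick illustration using the values from this log:

    # Capacity math for the inventory reported above:
    #   usable = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0,
                 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                      'max_unit': 65530},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0,
                    'max_unit': 156},
    }
    for rc, inv in inventory.items():
        usable = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: {usable} allocatable, at most {inv['max_unit']} per request")
    # Prints: VCPU 192, MEMORY_MB 196078, DISK_GB 400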
[ 1197.161368] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9b2d6e-d5b8-486a-89d6-27f82cb38ab8 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1197.168161] env[62204]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9347e4dd-0076-44dc-82be-d6e2b2b67787 {{(pid=62204) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1197.181049] env[62204]: DEBUG nova.compute.provider_tree [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed in ProviderTree for provider: 92e8f362-5134-40c6-9a5c-0b8f64197972 {{(pid=62204) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1197.683660] env[62204]: DEBUG nova.scheduler.client.report [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Inventory has not changed for provider 92e8f362-5134-40c6-9a5c-0b8f64197972 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 156, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62204) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}}
[ 1198.187987] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.593s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1198.211576] env[62204]: INFO nova.scheduler.client.report [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Deleted allocations for instance 471b4b93-a4b7-4b1c-8559-24a8db15b1b7
[ 1198.723706] env[62204]: DEBUG oslo_concurrency.lockutils [None req-c3e101a4-f55b-4a75-9432-78f4a99b4711 tempest-ServerActionsTestJSON-1799023414 tempest-ServerActionsTestJSON-1799023414-project-member] Lock "471b4b93-a4b7-4b1c-8559-24a8db15b1b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.180s {{(pid=62204) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
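[Annotation] The two "Deleted allocations for instance ..." lines mark the report client removing each instance's placement allocations, which on the wire is a DELETE against /allocations/{consumer_uuid}. A hedged sketch follows; the endpoint URL, token, and microversion are placeholders, not values taken from this deployment.

    # Sketch only: placement endpoint, token, and microversion below are
    # assumed placeholders, not values from this log.
    import requests

    PLACEMENT = 'http://placement.example.test/placement'   # assumed URL
    TOKEN = 'REPLACE_WITH_KEYSTONE_TOKEN'                    # assumed token
    consumer = '471b4b93-a4b7-4b1c-8559-24a8db15b1b7'        # instance uuid from the log

    resp = requests.delete(
        f'{PLACEMENT}/allocations/{consumer}',
        headers={'X-Auth-Token': TOKEN,
                 'OpenStack-API-Version': 'placement 1.28'})
    resp.raise_for_status()   # placement answers 204 No Content on success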